From 3a65d6236bd867c47aff30ee26e7a957b6747c73 Mon Sep 17 00:00:00 2001
From: Ole Gabor Husebø <oleghu@stud.ntnu.no>
Date: Mon, 24 May 2021 12:43:12 +0200
Subject: [PATCH] Add the image_classifier_model_builder and the trained
 model; also add example pictures
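
The model checked in here uses the Keras SavedModel directory layout
(keras_metadata.pb, saved_model.pb, variables/). As a minimal sketch of
how such a model could be built and saved (the CNN architecture,
hyperparameters, and use of CIFAR-10 are illustrative assumptions, not
the exact contents of image_classifier.py):

    import tensorflow as tf
    from tensorflow import keras

    # Load and normalise CIFAR-10 (assumed dataset; the bundled
    # frog.jpg suggests the standard 10-class set that includes "frog").
    (x_train, y_train), (x_test, y_test) = keras.datasets.cifar10.load_data()
    x_train, x_test = x_train / 255.0, x_test / 255.0

    # Small illustrative CNN; the real builder may differ.
    model = keras.Sequential([
        keras.layers.Conv2D(32, (3, 3), activation="relu",
                            input_shape=(32, 32, 3)),
        keras.layers.MaxPooling2D((2, 2)),
        keras.layers.Conv2D(64, (3, 3), activation="relu"),
        keras.layers.MaxPooling2D((2, 2)),
        keras.layers.Flatten(),
        keras.layers.Dense(64, activation="relu"),
        keras.layers.Dense(10, activation="softmax"),
    ])
    model.compile(optimizer="adam",
                  loss="sparse_categorical_crossentropy",
                  metrics=["accuracy"])
    model.fit(x_train, y_train, epochs=10,
              validation_data=(x_test, y_test))

    # Saving without an .h5 suffix produces the SavedModel directory
    # layout seen in this patch (image_classifier.model/).
    model.save("image_classifier.model")

The example pictures can then be classified by loading the saved model
back. Pillow is present in the bundled venv, so a hedged usage sketch:

    import numpy as np
    from PIL import Image
    from tensorflow import keras

    # CIFAR-10 label order (an assumption, matching the dataset above).
    class_names = ["airplane", "automobile", "bird", "cat", "deer",
                   "dog", "frog", "horse", "ship", "truck"]

    model = keras.models.load_model("image_classifier.model")
    img = np.asarray(Image.open("frog.jpg").resize((32, 32))) / 255.0
    prediction = model.predict(img[np.newaxis, ...])
    print(class_names[int(np.argmax(prediction))])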

---
 .idea/workspace.xml                           | 11577 +---------------
 frog.jpg                                      |   Bin 0 -> 1465 bytes
 image_classifier.py                           |     2 +-
 .../Werkzeug-2.0.1.dist-info/INSTALLER        |     1 +
 .../Werkzeug-2.0.1.dist-info/LICENSE.rst      |    28 +
 .../Werkzeug-2.0.1.dist-info/METADATA         |   128 +
 .../Werkzeug-2.0.1.dist-info/RECORD           |   111 +
 .../Werkzeug-2.0.1.dist-info/WHEEL            |     5 +
 .../Werkzeug-2.0.1.dist-info/top_level.txt    |     1 +
 .../site-packages/cached_property.py          |   153 +
 venv/lib/python3.7/site-packages/cycler.py    |   558 +
 .../site-packages/distutils-precedence.pth    |     1 +
 .../google_auth-1.30.0-py3.9-nspkg.pth        |     1 +
 .../kiwisolver.cpython-37m-darwin.so          |   Bin 0 -> 217624 bytes
 .../matplotlib-3.4.2-py3.7-nspkg.pth          |     1 +
 .../protobuf-3.17.0-py3.7-nspkg.pth           |     1 +
 venv/lib/python3.7/site-packages/pylab.py     |     3 +
 venv/lib/python3.7/site-packages/pyparsing.py |  7107 ++++++++++
 venv/lib/python3.7/site-packages/six.py       |   982 ++
 .../INSTALLER                                 |     1 +
 .../METADATA                                  |    30 +
 .../RECORD                                    |   239 +
 .../WHEEL                                     |     6 +
 .../top_level.txt                             |     1 +
 .../tensorflow_estimator/__init__.py          |    19 +
 .../tensorflow_estimator/python/__init__.py   |     0
 .../python/estimator/__init__.py              |     0
 .../python/estimator/canned/__init__.py       |     0
 .../python/estimator/canned/baseline.py       |   651 +
 .../python/estimator/canned/boosted_trees.py  |  2391 ++++
 .../estimator/canned/boosted_trees_utils.py   |    94 +
 .../python/estimator/canned/dnn.py            |  1229 ++
 .../estimator/canned/dnn_linear_combined.py   |  1140 ++
 .../estimator/canned/dnn_testing_utils.py     |  2141 +++
 .../python/estimator/canned/head.py           |  1715 +++
 .../python/estimator/canned/kmeans.py         |   479 +
 .../python/estimator/canned/linear.py         |  1675 +++
 .../estimator/canned/linear_testing_utils.py  |  2239 +++
 .../python/estimator/canned/metric_keys.py    |    61 +
 .../python/estimator/canned/optimizers.py     |   146 +
 .../python/estimator/canned/parsing_utils.py  |   353 +
 .../estimator/canned/prediction_keys.py       |    37 +
 .../python/estimator/canned/rnn.py            |   691 +
 .../estimator/canned/saved_model_estimator.py |   496 +
 .../canned/timeseries/model_utils.py          |    76 +
 .../canned/timeseries/saved_model_utils.py    |   299 +
 .../canned/timeseries/state_management.py     |    98 +
 .../python/estimator/canned/v1/__init__.py    |     0
 .../canned/v1/dnn_testing_utils_v1.py         |  2127 +++
 .../canned/v1/linear_testing_utils_v1.py      |  2410 ++++
 .../python/estimator/early_stopping.py        |   602 +
 .../python/estimator/estimator.py             |  2389 ++++
 .../python/estimator/estimator_lib.py         |    74 +
 .../python/estimator/export/__init__.py       |     0
 .../python/estimator/export/export.py         |   484 +
 .../python/estimator/export/export_lib.py     |    48 +
 .../python/estimator/export/export_output.py  |    36 +
 .../python/estimator/export/function.py       |   400 +
 .../python/estimator/exporter.py              |   509 +
 .../python/estimator/extenders.py             |   123 +
 .../python/estimator/gc.py                    |   217 +
 .../python/estimator/head/__init__.py         |     0
 .../python/estimator/head/base_head.py        |   934 ++
 .../estimator/head/binary_class_head.py       |   601 +
 .../python/estimator/head/head_utils.py       |   102 +
 .../python/estimator/head/multi_class_head.py |   496 +
 .../python/estimator/head/multi_head.py       |   548 +
 .../python/estimator/head/multi_label_head.py |   591 +
 .../python/estimator/head/regression_head.py  |   583 +
 .../python/estimator/head/sequential_head.py  |   494 +
 .../python/estimator/hooks/__init__.py        |     0
 .../hooks/basic_session_run_hooks.py          |    49 +
 .../estimator/hooks/fake_summary_writer.py    |   143 +
 .../python/estimator/hooks/hooks.py           |   283 +
 .../estimator/hooks/session_run_hook.py       |   101 +
 .../python/estimator/inputs/__init__.py       |     0
 .../python/estimator/inputs/inputs.py         |    25 +
 .../python/estimator/inputs/numpy_io.py       |   224 +
 .../python/estimator/inputs/pandas_io.py      |   158 +
 .../estimator/inputs/queues/__init__.py       |     0
 .../inputs/queues/feeding_functions.py        |   504 +
 .../inputs/queues/feeding_queue_runner.py     |   184 +
 .../python/estimator/keras.py                 |   748 +
 .../python/estimator/mode_keys.py             |    24 +
 .../python/estimator/model_fn.py              |   633 +
 .../python/estimator/run_config.py            |   990 ++
 .../python/estimator/tools/__init__.py        |     0
 .../python/estimator/tools/analytics.py       |    37 +
 .../estimator/tools/checkpoint_converter.py   |   368 +
 .../python/estimator/tpu/__init__.py          |     0
 .../estimator/tpu/_tpu_estimator_embedding.py |   628 +
 .../python/estimator/tpu/error_handling.py    |   154 +
 .../tpu/iteration_count_estimator.py          |   201 +
 .../python/estimator/tpu/tpu_config.py        |   329 +
 .../python/estimator/tpu/tpu_context.py       |   911 ++
 .../python/estimator/tpu/tpu_estimator.py     |  4554 ++++++
 .../python/estimator/tpu/util.py              |    96 +
 .../python/estimator/training.py              |  1118 ++
 .../python/estimator/util.py                  |   114 +
 .../termcolor-1.1.0-py3.7.egg-info/PKG-INFO   |   133 +
 .../SOURCES.txt                               |     8 +
 .../dependency_links.txt                      |     1 +
 .../installed-files.txt                       |     6 +
 .../top_level.txt                             |     1 +
 venv/lib/python3.7/site-packages/termcolor.py |   168 +
 .../INSTALLER                                 |     1 +
 .../LICENSE                                   |   254 +
 .../METADATA                                  |    43 +
 .../RECORD                                    |     8 +
 .../typing_extensions-3.7.4.3.dist-info/WHEEL |     5 +
 .../top_level.txt                             |     1 +
 .../site-packages/typing_extensions.py        |  2168 +++
 .../urllib3-1.26.4.dist-info/DESCRIPTION.rst  |  1323 ++
 .../urllib3-1.26.4.dist-info/INSTALLER        |     1 +
 .../urllib3-1.26.4.dist-info/LICENSE.txt      |    21 +
 .../urllib3-1.26.4.dist-info/METADATA         |  1366 ++
 .../urllib3-1.26.4.dist-info/RECORD           |    86 +
 .../urllib3-1.26.4.dist-info/WHEEL            |     6 +
 .../urllib3-1.26.4.dist-info/metadata.json    |     1 +
 .../urllib3-1.26.4.dist-info/top_level.txt    |     1 +
 .../site-packages/urllib3/__init__.py         |    85 +
 .../site-packages/urllib3/_collections.py     |   337 +
 .../site-packages/urllib3/_version.py         |     2 +
 .../site-packages/urllib3/connection.py       |   539 +
 .../site-packages/urllib3/connectionpool.py   |  1067 ++
 .../site-packages/urllib3/contrib/__init__.py |     0
 .../urllib3/contrib/_appengine_environ.py     |    36 +
 .../contrib/_securetransport/__init__.py      |     0
 .../contrib/_securetransport/bindings.py      |   519 +
 .../contrib/_securetransport/low_level.py     |   396 +
 .../urllib3/contrib/appengine.py              |   314 +
 .../site-packages/urllib3/contrib/ntlmpool.py |   121 +
 .../urllib3/contrib/pyopenssl.py              |   509 +
 .../urllib3/contrib/securetransport.py        |   920 ++
 .../site-packages/urllib3/contrib/socks.py    |   216 +
 .../site-packages/urllib3/exceptions.py       |   323 +
 .../python3.7/site-packages/urllib3/fields.py |   274 +
 .../site-packages/urllib3/filepost.py         |    98 +
 .../urllib3/packages/__init__.py              |     5 +
 .../urllib3/packages/backports/__init__.py    |     0
 .../urllib3/packages/backports/makefile.py    |    51 +
 .../site-packages/urllib3/packages/six.py     |  1021 ++
 .../packages/ssl_match_hostname/__init__.py   |    22 +
 .../ssl_match_hostname/_implementation.py     |   160 +
 .../site-packages/urllib3/poolmanager.py      |   536 +
 .../site-packages/urllib3/request.py          |   170 +
 .../site-packages/urllib3/response.py         |   821 ++
 .../site-packages/urllib3/util/__init__.py    |    49 +
 .../site-packages/urllib3/util/connection.py  |   150 +
 .../site-packages/urllib3/util/proxy.py       |    56 +
 .../site-packages/urllib3/util/queue.py       |    22 +
 .../site-packages/urllib3/util/request.py     |   143 +
 .../site-packages/urllib3/util/response.py    |   107 +
 .../site-packages/urllib3/util/retry.py       |   602 +
 .../site-packages/urllib3/util/ssl_.py        |   474 +
 .../urllib3/util/ssltransport.py              |   221 +
 .../site-packages/urllib3/util/timeout.py     |   268 +
 .../site-packages/urllib3/util/url.py         |   430 +
 .../site-packages/urllib3/util/wait.py        |   153 +
 .../site-packages/werkzeug/__init__.py        |     6 +
 .../site-packages/werkzeug/_internal.py       |   626 +
 .../site-packages/werkzeug/_reloader.py       |   430 +
 .../site-packages/werkzeug/datastructures.py  |  3051 ++++
 .../site-packages/werkzeug/datastructures.pyi |   906 ++
 .../site-packages/werkzeug/debug/__init__.py  |   501 +
 .../site-packages/werkzeug/debug/console.py   |   211 +
 .../site-packages/werkzeug/debug/repr.py      |   284 +
 .../werkzeug/debug/shared/FONT_LICENSE        |    96 +
 .../werkzeug/debug/shared/ICON_LICENSE.md     |     6 +
 .../werkzeug/debug/shared/console.png         |   Bin 0 -> 507 bytes
 .../werkzeug/debug/shared/debugger.js         |   359 +
 .../werkzeug/debug/shared/less.png            |   Bin 0 -> 191 bytes
 .../werkzeug/debug/shared/more.png            |   Bin 0 -> 200 bytes
 .../werkzeug/debug/shared/source.png          |   Bin 0 -> 818 bytes
 .../werkzeug/debug/shared/style.css           |   163 +
 .../werkzeug/debug/shared/ubuntu.ttf          |   Bin 0 -> 70220 bytes
 .../site-packages/werkzeug/debug/tbtools.py   |   595 +
 .../site-packages/werkzeug/exceptions.py      |   943 ++
 .../site-packages/werkzeug/filesystem.py      |    55 +
 .../site-packages/werkzeug/formparser.py      |   495 +
 .../python3.7/site-packages/werkzeug/http.py  |  1388 ++
 .../python3.7/site-packages/werkzeug/local.py |   666 +
 .../werkzeug/middleware/__init__.py           |    22 +
 .../werkzeug/middleware/dispatcher.py         |    78 +
 .../werkzeug/middleware/http_proxy.py         |   230 +
 .../site-packages/werkzeug/middleware/lint.py |   420 +
 .../werkzeug/middleware/profiler.py           |   139 +
 .../werkzeug/middleware/proxy_fix.py          |   187 +
 .../werkzeug/middleware/shared_data.py        |   320 +
 .../python3.7/site-packages/werkzeug/py.typed |     0
 .../site-packages/werkzeug/routing.py         |  2332 ++++
 .../site-packages/werkzeug/sansio/__init__.py |     0
 .../werkzeug/sansio/multipart.py              |   260 +
 .../site-packages/werkzeug/sansio/request.py  |   548 +
 .../site-packages/werkzeug/sansio/response.py |   656 +
 .../site-packages/werkzeug/sansio/utils.py    |   142 +
 .../site-packages/werkzeug/security.py        |   247 +
 .../site-packages/werkzeug/serving.py         |  1079 ++
 .../python3.7/site-packages/werkzeug/test.py  |  1324 ++
 .../site-packages/werkzeug/testapp.py         |   240 +
 .../python3.7/site-packages/werkzeug/urls.py  |  1211 ++
 .../site-packages/werkzeug/user_agent.py      |    47 +
 .../site-packages/werkzeug/useragents.py      |   215 +
 .../python3.7/site-packages/werkzeug/utils.py |  1091 ++
 .../werkzeug/wrappers/__init__.py             |    16 +
 .../site-packages/werkzeug/wrappers/accept.py |    14 +
 .../site-packages/werkzeug/wrappers/auth.py   |    26 +
 .../werkzeug/wrappers/base_request.py         |    36 +
 .../werkzeug/wrappers/base_response.py        |    36 +
 .../werkzeug/wrappers/common_descriptors.py   |    26 +
 .../site-packages/werkzeug/wrappers/cors.py   |    26 +
 .../site-packages/werkzeug/wrappers/etag.py   |    26 +
 .../site-packages/werkzeug/wrappers/json.py   |    13 +
 .../werkzeug/wrappers/request.py              |   660 +
 .../werkzeug/wrappers/response.py             |   890 ++
 .../werkzeug/wrappers/user_agent.py           |    14 +
 .../python3.7/site-packages/werkzeug/wsgi.py  |   982 ++
 .../wheel-0.36.2.dist-info/INSTALLER          |     1 +
 .../wheel-0.36.2.dist-info/LICENSE.txt        |    22 +
 .../wheel-0.36.2.dist-info/METADATA           |    68 +
 .../wheel-0.36.2.dist-info/RECORD             |    40 +
 .../wheel-0.36.2.dist-info/WHEEL              |     6 +
 .../wheel-0.36.2.dist-info/entry_points.txt   |     6 +
 .../wheel-0.36.2.dist-info/top_level.txt      |     1 +
 .../python3.7/site-packages/wheel/__init__.py |     1 +
 .../python3.7/site-packages/wheel/__main__.py |    19 +
 .../site-packages/wheel/bdist_wheel.py        |   492 +
 .../site-packages/wheel/cli/__init__.py       |    88 +
 .../site-packages/wheel/cli/convert.py        |   269 +
 .../python3.7/site-packages/wheel/cli/pack.py |    79 +
 .../site-packages/wheel/cli/unpack.py         |    25 +
 .../site-packages/wheel/macosx_libfile.py     |   428 +
 .../python3.7/site-packages/wheel/metadata.py |   133 +
 .../python3.7/site-packages/wheel/pkginfo.py  |    43 +
 .../lib/python3.7/site-packages/wheel/util.py |    46 +
 .../site-packages/wheel/vendored/__init__.py  |     0
 .../wheel/vendored/packaging/__init__.py      |     0
 .../wheel/vendored/packaging/_typing.py       |    48 +
 .../wheel/vendored/packaging/tags.py          |   852 ++
 .../site-packages/wheel/wheelfile.py          |   169 +
 .../wrapt-1.12.1-py3.7.egg-info/PKG-INFO      |   167 +
 .../wrapt-1.12.1-py3.7.egg-info/SOURCES.txt   |    12 +
 .../dependency_links.txt                      |     1 +
 .../installed-files.txt                       |    13 +
 .../wrapt-1.12.1-py3.7.egg-info/top_level.txt |     1 +
 .../python3.7/site-packages/wrapt/__init__.py |    16 +
 .../wrapt/_wrappers.cpython-37m-darwin.so     |   Bin 0 -> 74040 bytes
 .../site-packages/wrapt/decorators.py         |   516 +
 .../python3.7/site-packages/wrapt/importer.py |   230 +
 .../python3.7/site-packages/wrapt/wrappers.py |   947 ++
 .../zipp-3.4.1.dist-info/INSTALLER            |     1 +
 .../zipp-3.4.1.dist-info/LICENSE              |    19 +
 .../zipp-3.4.1.dist-info/METADATA             |    54 +
 .../site-packages/zipp-3.4.1.dist-info/RECORD |     8 +
 .../site-packages/zipp-3.4.1.dist-info/WHEEL  |     5 +
 .../zipp-3.4.1.dist-info/top_level.txt        |     1 +
 venv/lib/python3.7/site-packages/zipp.py      |   314 +
 venv/pip-selfcheck.json                       |     1 +
 258 files changed, 98044 insertions(+), 11562 deletions(-)
 create mode 100644 frog.jpg
 create mode 100644 venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/INSTALLER
 create mode 100644 venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/LICENSE.rst
 create mode 100644 venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/METADATA
 create mode 100644 venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/RECORD
 create mode 100644 venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/WHEEL
 create mode 100644 venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/top_level.txt
 create mode 100644 venv/lib/python3.7/site-packages/cached_property.py
 create mode 100644 venv/lib/python3.7/site-packages/cycler.py
 create mode 100644 venv/lib/python3.7/site-packages/distutils-precedence.pth
 create mode 100644 venv/lib/python3.7/site-packages/google_auth-1.30.0-py3.9-nspkg.pth
 create mode 100755 venv/lib/python3.7/site-packages/kiwisolver.cpython-37m-darwin.so
 create mode 100644 venv/lib/python3.7/site-packages/matplotlib-3.4.2-py3.7-nspkg.pth
 create mode 100644 venv/lib/python3.7/site-packages/protobuf-3.17.0-py3.7-nspkg.pth
 create mode 100644 venv/lib/python3.7/site-packages/pylab.py
 create mode 100644 venv/lib/python3.7/site-packages/pyparsing.py
 create mode 100644 venv/lib/python3.7/site-packages/six.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/INSTALLER
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/METADATA
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/RECORD
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/WHEEL
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/top_level.txt
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/baseline.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/boosted_trees.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/boosted_trees_utils.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn_linear_combined.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn_testing_utils.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/head.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/kmeans.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear_testing_utils.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/metric_keys.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/optimizers.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/parsing_utils.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/prediction_keys.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/rnn.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/saved_model_estimator.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/model_utils.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/saved_model_utils.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/state_management.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/dnn_testing_utils_v1.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/linear_testing_utils_v1.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/early_stopping.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/estimator.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/estimator_lib.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export_lib.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export_output.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/function.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/exporter.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/extenders.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/gc.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/base_head.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/binary_class_head.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/head_utils.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_class_head.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_head.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_label_head.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/regression_head.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/sequential_head.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/basic_session_run_hooks.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/fake_summary_writer.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/hooks.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/session_run_hook.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/inputs.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/numpy_io.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/pandas_io.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/feeding_functions.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/feeding_queue_runner.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/keras.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/mode_keys.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/model_fn.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/run_config.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/analytics.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/checkpoint_converter.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/_tpu_estimator_embedding.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/error_handling.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/iteration_count_estimator.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_config.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_context.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_estimator.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/util.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/training.py
 create mode 100644 venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/util.py
 create mode 100644 venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/PKG-INFO
 create mode 100644 venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/SOURCES.txt
 create mode 100644 venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/dependency_links.txt
 create mode 100644 venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/installed-files.txt
 create mode 100644 venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/top_level.txt
 create mode 100644 venv/lib/python3.7/site-packages/termcolor.py
 create mode 100644 venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/INSTALLER
 create mode 100644 venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/LICENSE
 create mode 100644 venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/METADATA
 create mode 100644 venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/RECORD
 create mode 100644 venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/WHEEL
 create mode 100644 venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/top_level.txt
 create mode 100644 venv/lib/python3.7/site-packages/typing_extensions.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/DESCRIPTION.rst
 create mode 100644 venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/INSTALLER
 create mode 100644 venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/LICENSE.txt
 create mode 100644 venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/METADATA
 create mode 100644 venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/RECORD
 create mode 100644 venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/WHEEL
 create mode 100644 venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/metadata.json
 create mode 100644 venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/top_level.txt
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/_collections.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/_version.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/connection.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/connectionpool.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/contrib/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/contrib/_appengine_environ.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/bindings.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/low_level.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/contrib/appengine.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/contrib/ntlmpool.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/contrib/pyopenssl.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/contrib/securetransport.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/contrib/socks.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/exceptions.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/fields.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/filepost.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/packages/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/packages/backports/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/packages/backports/makefile.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/packages/six.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/packages/ssl_match_hostname/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/packages/ssl_match_hostname/_implementation.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/poolmanager.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/request.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/response.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/connection.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/proxy.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/queue.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/request.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/response.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/retry.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/ssl_.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/ssltransport.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/timeout.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/url.py
 create mode 100644 venv/lib/python3.7/site-packages/urllib3/util/wait.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/_internal.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/_reloader.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/datastructures.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/datastructures.pyi
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/console.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/repr.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/shared/FONT_LICENSE
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/shared/ICON_LICENSE.md
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/shared/console.png
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/shared/debugger.js
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/shared/less.png
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/shared/more.png
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/shared/source.png
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/shared/style.css
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/shared/ubuntu.ttf
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/debug/tbtools.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/exceptions.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/filesystem.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/formparser.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/http.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/local.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/middleware/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/middleware/dispatcher.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/middleware/http_proxy.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/middleware/lint.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/middleware/profiler.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/middleware/proxy_fix.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/middleware/shared_data.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/py.typed
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/routing.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/sansio/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/sansio/multipart.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/sansio/request.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/sansio/response.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/sansio/utils.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/security.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/serving.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/test.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/testapp.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/urls.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/user_agent.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/useragents.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/utils.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/accept.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/auth.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/base_request.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/base_response.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/common_descriptors.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/cors.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/etag.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/json.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/request.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/response.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wrappers/user_agent.py
 create mode 100644 venv/lib/python3.7/site-packages/werkzeug/wsgi.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/INSTALLER
 create mode 100644 venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/LICENSE.txt
 create mode 100644 venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/METADATA
 create mode 100644 venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/RECORD
 create mode 100644 venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/WHEEL
 create mode 100644 venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/entry_points.txt
 create mode 100644 venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/top_level.txt
 create mode 100644 venv/lib/python3.7/site-packages/wheel/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/__main__.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/bdist_wheel.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/cli/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/cli/convert.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/cli/pack.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/cli/unpack.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/macosx_libfile.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/metadata.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/pkginfo.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/util.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/vendored/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/vendored/packaging/__init__.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/vendored/packaging/_typing.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/vendored/packaging/tags.py
 create mode 100644 venv/lib/python3.7/site-packages/wheel/wheelfile.py
 create mode 100644 venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/PKG-INFO
 create mode 100644 venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/SOURCES.txt
 create mode 100644 venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/dependency_links.txt
 create mode 100644 venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/installed-files.txt
 create mode 100644 venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/top_level.txt
 create mode 100644 venv/lib/python3.7/site-packages/wrapt/__init__.py
 create mode 100755 venv/lib/python3.7/site-packages/wrapt/_wrappers.cpython-37m-darwin.so
 create mode 100644 venv/lib/python3.7/site-packages/wrapt/decorators.py
 create mode 100644 venv/lib/python3.7/site-packages/wrapt/importer.py
 create mode 100644 venv/lib/python3.7/site-packages/wrapt/wrappers.py
 create mode 100644 venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/INSTALLER
 create mode 100644 venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/LICENSE
 create mode 100644 venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/METADATA
 create mode 100644 venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/RECORD
 create mode 100644 venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/WHEEL
 create mode 100644 venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/top_level.txt
 create mode 100644 venv/lib/python3.7/site-packages/zipp.py
 create mode 100644 venv/pip-selfcheck.json

diff --git a/.idea/workspace.xml b/.idea/workspace.xml
index 81621f7b..5fd1e5cf 100644
--- a/.idea/workspace.xml
+++ b/.idea/workspace.xml
@@ -2,11571 +2,31 @@
 <project version="4">
   <component name="ChangeListManager">
     <list default="true" id="2a1baa92-27d8-43a9-8e6a-871dd8aa32ea" name="Default Changelist" comment="">
-      <change afterPath="$PROJECT_DIR$/image_classifier.model/keras_metadata.pb" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/image_classifier.model/saved_model.pb" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/image_classifier.model/variables/variables.data-00000-of-00001" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/image_classifier.model/variables/variables.index" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/chardetect" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/estimator_ckpt_converter" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/f2py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/f2py3" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/f2py3.7" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/google-oauthlib-tool" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/import_pb_to_tensorboard" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/markdown_py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/pyrsa-decrypt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/pyrsa-encrypt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/pyrsa-keygen" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/pyrsa-priv2pub" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/pyrsa-sign" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/pyrsa-verify" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/saved_model_cli" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/tensorboard" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/tf_upgrade_v2" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/tflite_convert" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/toco" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/toco_from_protos" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/bin/wheel" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Keras_Preprocessing-1.1.2.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Keras_Preprocessing-1.1.2.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Keras_Preprocessing-1.1.2.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Keras_Preprocessing-1.1.2.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Keras_Preprocessing-1.1.2.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Keras_Preprocessing-1.1.2.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Markdown-3.3.4.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Markdown-3.3.4.dist-info/LICENSE.md" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Markdown-3.3.4.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Markdown-3.3.4.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Markdown-3.3.4.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Markdown-3.3.4.dist-info/entry_points.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Markdown-3.3.4.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libXau.6.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libfreetype.6.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libharfbuzz.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libjpeg.9.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/liblcms2.2.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/liblzma.5.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libopenjp2.2.4.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libpng16.16.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libtiff.5.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libwebp.7.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libwebpdemux.2.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libwebpmux.3.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libxcb.1.1.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/.dylibs/libz.1.2.11.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/BdfFontFile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/BlpImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/BmpImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/BufrStubImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ContainerIO.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/CurImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/DcxImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/DdsImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/EpsImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ExifTags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/FitsStubImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/FliImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/FontFile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/FpxImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/FtexImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/GbrImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/GdImageFile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/GifImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/GimpGradientFile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/GimpPaletteFile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/GribStubImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/Hdf5StubImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/IcnsImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/IcoImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/Image.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageChops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageCms.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageColor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageDraw.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageDraw2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageEnhance.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageFile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageFilter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageFont.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageGrab.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageMath.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageMode.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageMorph.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageOps.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImagePalette.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImagePath.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageQt.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageSequence.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageShow.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageStat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageTk.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageTransform.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImageWin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/ImtImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/IptcImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/Jpeg2KImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/JpegImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/JpegPresets.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/McIdasImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/MicImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/MpegImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/MpoImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/MspImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PSDraw.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PaletteFile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PalmImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PcdImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PcfFontFile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PcxImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PdfImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PdfParser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PixarImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PngImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PpmImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PsdImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/PyAccess.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/SgiImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/SpiderImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/SunImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/TarIO.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/TgaImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/TiffImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/TiffTags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/WalImageFile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/WebPImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/WmfImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/XVThumbImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/XbmImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/XpmImagePlugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/__main__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_binary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_imaging.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_imagingcms.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_imagingft.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_imagingmath.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_imagingmorph.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_imagingtk.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_tkinter_finder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/_webp.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/PIL/features.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Pillow-8.2.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Pillow-8.2.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Pillow-8.2.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Pillow-8.2.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Pillow-8.2.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Pillow-8.2.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Pillow-8.2.0.dist-info/zip-safe" afterDir="false" />
+      <change afterPath="$PROJECT_DIR$/frog.jpg" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/INSTALLER" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/LICENSE.rst" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/METADATA" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/RECORD" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/WHEEL" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/_distutils_hack/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/_distutils_hack/override.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/_collections_abc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/_enum_module.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/app.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/command_name.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/flags/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/flags/_argument_parser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/flags/_defines.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/flags/_exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/flags/_flag.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/flags/_flagvalues.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/flags/_helpers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/flags/_validators.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/flags/argparse_flags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/logging/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/logging/converter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/testing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/testing/_bazel_selected_py3.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/testing/_bazelize_command.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/testing/_parameterized_async.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/testing/_pretty_print_reporter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/testing/absltest.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/testing/flagsaver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/testing/parameterized.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/testing/xml_reporter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/third_party/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/third_party/unittest3_backport/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/third_party/unittest3_backport/case.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl/third_party/unittest3_backport/result.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl_py-0.12.0.dist-info/AUTHORS" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl_py-0.12.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl_py-0.12.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl_py-0.12.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl_py-0.12.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl_py-0.12.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/absl_py-0.12.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse-1.6.3.dist-info/AUTHORS.rst" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse-1.6.3.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse-1.6.3.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse-1.6.3.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse-1.6.3.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse-1.6.3.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse-1.6.3.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse/__main__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse/printer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/astunparse/unparser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cached_property-1.5.2.dist-info/AUTHORS.rst" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cached_property-1.5.2.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cached_property-1.5.2.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cached_property-1.5.2.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cached_property-1.5.2.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cached_property-1.5.2.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cached_property-1.5.2.dist-info/top_level.txt" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cached_property.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools-4.2.2.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools-4.2.2.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools-4.2.2.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools-4.2.2.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools-4.2.2.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools-4.2.2.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/cache.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/decorators.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/fifo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/func.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/keys.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/lfu.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/lru.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/mru.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/rr.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cachetools/ttl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/certifi-2020.12.5.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/certifi-2020.12.5.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/certifi-2020.12.5.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/certifi-2020.12.5.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/certifi-2020.12.5.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/certifi-2020.12.5.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/certifi/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/certifi/__main__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/certifi/cacert.pem" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/certifi/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/entry_points.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/big5freq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/big5prober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/chardistribution.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/charsetgroupprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/charsetprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/cli/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/cli/chardetect.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/codingstatemachine.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/cp949prober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/enums.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/escprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/escsm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/eucjpprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/euckrfreq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/euckrprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/euctwfreq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/euctwprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/gb2312freq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/gb2312prober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/hebrewprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/jisfreq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/jpcntx.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langbulgarianmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langgreekmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langhebrewmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langhungarianmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langrussianmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langthaimodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langturkishmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/latin1prober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/mbcharsetprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/mbcsgroupprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/mbcssm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/metadata/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/metadata/languages.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/sbcharsetprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/sbcsgroupprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/sjisprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/universaldetector.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/utf8prober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libaom.3.1.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libavcodec.58.134.100.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libavformat.58.76.100.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libavresample.4.0.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libavutil.56.70.100.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libbluray.2.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libcrypto.1.1.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libdav1d.5.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libffi.7.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libfontconfig.1.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libfreetype.6.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libgmp.10.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libgnutls.30.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libhogweed.6.3.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libidn2.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libintl.8.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/liblzma.5.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libmp3lame.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libnettle.8.3.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libogg.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libopencore-amrnb.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libopencore-amrwb.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libopenjp2.2.4.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libopus.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libp11-kit.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libpng16.16.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/librav1e.0.4.1.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libsnappy.1.1.8.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libsodium.23.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libsoxr.0.1.2.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libspeex.1.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libsrt.1.4.3.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libssl.1.1.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libswresample.3.9.100.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libswscale.5.9.100.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libtasn1.6.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libtheoradec.1.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libtheoraenc.1.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libunistring.2.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libvorbis.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libvorbisenc.2.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libwebp.7.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libwebpmux.3.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/.dylibs/libzmq.5.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/LICENSE-3RD-PARTY.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/LICENSE.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/cv2.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_eye.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_eye_tree_eyeglasses.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_frontalcatface.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_frontalcatface_extended.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_frontalface_alt.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_frontalface_alt2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_frontalface_alt_tree.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_frontalface_default.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_fullbody.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_lefteye_2splits.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_licence_plate_rus_16stages.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_lowerbody.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_profileface.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_righteye_2splits.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_russian_plate_number.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_smile.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/data/haarcascade_upperbody.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cv2/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cycler-0.10.0.dist-info/DESCRIPTION.rst" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cycler-0.10.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cycler-0.10.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cycler-0.10.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cycler-0.10.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cycler-0.10.0.dist-info/metadata.json" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cycler-0.10.0.dist-info/top_level.txt" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/cycler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/_common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/_version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/easter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/parser/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/parser/_parser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/parser/isoparser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/relativedelta.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/rrule.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/tz/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/tz/_common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/tz/_factories.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/tz/tz.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/tz/win.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/tzwin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/zoneinfo/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/dateutil/zoneinfo/rebuild.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/distutils-precedence.pth" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers-1.12.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers-1.12.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers-1.12.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers-1.12.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers-1.12.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers/builder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers/encode.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers/number_types.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers/packer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers/table.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/flatbuffers/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast-0.4.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast-0.4.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast-0.4.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast-0.4.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast-0.4.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast-0.4.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast/ast2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast/ast3.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast/astn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/gast/gast.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/_cloud_sdk.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/_credentials_async.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/_default.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/_default_async.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/_helpers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/_jwt_async.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/_oauth2client.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/_service_account_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/app_engine.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/aws.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/compute_engine/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/compute_engine/_metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/compute_engine/credentials.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/credentials.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/crypt/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/crypt/_cryptography_rsa.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/crypt/_helpers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/crypt/_python_rsa.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/crypt/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/crypt/es256.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/crypt/rsa.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/environment_vars.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/external_account.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/iam.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/identity_pool.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/impersonated_credentials.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/jwt.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/transport/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/transport/_aiohttp_requests.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/transport/_http_client.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/transport/_mtls_helper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/transport/grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/transport/mtls.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/transport/requests.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/transport/urllib3.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/auth/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/_client.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/_client_async.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/_credentials_async.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/_id_token_async.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/_reauth_async.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/_service_account_async.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/challenges.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/credentials.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/id_token.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/reauth.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/service_account.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/sts.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/oauth2/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/any_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/api_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/compiler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/compiler/plugin_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/descriptor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/descriptor_database.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/descriptor_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/descriptor_pool.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/duration_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/empty_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/field_mask_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/_api_implementation.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/api_implementation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/containers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/decoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/encoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/enum_type_wrapper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/extension_dict.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/message_listener.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/python_message.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/type_checkers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/well_known_types.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/internal/wire_format.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/json_format.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/message.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/message_factory.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/proto_builder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/pyext/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/pyext/_message.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/pyext/cpp_message.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/reflection.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/service.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/service_reflection.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/source_context_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/struct_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/symbol_database.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/text_encoding.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/text_format.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/timestamp_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/type_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/util/json_format_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/util/json_format_proto3_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google/protobuf/wrappers_pb2.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth-1.30.0-py3.9-nspkg.pth" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth-1.30.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth-1.30.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth-1.30.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth-1.30.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth-1.30.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth-1.30.0.dist-info/namespace_packages.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth-1.30.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib-0.4.4.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib-0.4.4.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib-0.4.4.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib-0.4.4.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib-0.4.4.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib-0.4.4.dist-info/entry_points.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib-0.4.4.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib/flow.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib/helpers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib/interactive.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib/tool/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_auth_oauthlib/tool/__main__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_pasta-0.2.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_pasta-0.2.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_pasta-0.2.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_pasta-0.2.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_pasta-0.2.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/google_pasta-0.2.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_auth.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_channel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_compression.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_cython/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_cython/_credentials/roots.pem" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_cython/_cygrpc/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_cython/cygrpc.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_grpcio_metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_interceptor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_plugin_wrapping.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_runtime_protos.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_server.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_simple_stubs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/_utilities.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/_base_call.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/_base_channel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/_base_server.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/_call.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/_channel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/_interceptor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/_metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/_server.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/_typing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/aio/_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/beta/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/beta/_client_adaptations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/beta/_metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/beta/_server_adaptations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/beta/implementations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/beta/interfaces.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/beta/utilities.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/experimental/aio/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/experimental/gevent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/experimental/session_cache.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/common/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/common/cardinality.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/common/style.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/foundation/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/foundation/abandonment.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/foundation/callable_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/foundation/future.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/foundation/logging_pool.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/foundation/stream.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/foundation/stream_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/interfaces/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/interfaces/base/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/interfaces/base/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/interfaces/base/utilities.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/interfaces/face/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/interfaces/face/face.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpc/framework/interfaces/face/utilities.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpcio-1.34.1.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpcio-1.34.1.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpcio-1.34.1.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpcio-1.34.1.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpcio-1.34.1.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/grpcio-1.34.1.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py-3.1.0.dist-info/AUTHORS" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py-3.1.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py-3.1.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py-3.1.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py-3.1.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py-3.1.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py-3.1.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/.dylibs/libhdf5.200.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/.dylibs/libhdf5_hl.200.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_conv.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_errors.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/attrs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/dataset.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/datatype.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/dims.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/files.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/filters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/group.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/selections.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/selections2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_hl/vds.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_objects.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_proxy.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/_selector.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/defs.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5a.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5ac.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5d.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5ds.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5f.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5fd.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5g.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5i.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5l.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5o.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5p.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5pl.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5py_warnings.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5r.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5s.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5t.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/h5z.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/ipy_completer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/conftest.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/data_files/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/data_files/vlen_string_dset.h5" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/data_files/vlen_string_dset_utc.h5" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/data_files/vlen_string_s390x.h5" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_attribute_create.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_attrs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_attrs_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_big_endian_file.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_completions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_dataset.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_dataset_getitem.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_dataset_swmr.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_datatype.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_dimension_scales.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_dims_dimensionproxy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_dtype.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_file.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_file2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_file_image.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_filters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_group.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_h5.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_h5d_direct_chunk.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_h5f.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_h5p.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_h5pl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_h5t.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_objects.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_selections.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_slicing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_vds/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_vds/test_highlevel_vds.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_vds/test_lowlevel_vds.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/tests/test_vds/test_virtual_source.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/utils.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/h5py/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna-2.10.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna-2.10.dist-info/LICENSE.rst" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna-2.10.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna-2.10.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna-2.10.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna-2.10.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna/codec.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna/idnadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna/intranges.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna/package_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/idna/uts46data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata-4.0.1.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata-4.0.1.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata-4.0.1.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata-4.0.1.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata-4.0.1.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata-4.0.1.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata/_adapters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata/_collections.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata/_compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata/_functools.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata/_itertools.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata/_meta.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata/_text.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/importlib_metadata/py.typed" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/activations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/activations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/densenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/efficientnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/imagenet_utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/inception_resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/inception_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/mobilenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/mobilenet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/mobilenet_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/nasnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/resnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/resnet50/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/vgg16/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/vgg19/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/applications/xception/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/backend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/callbacks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/callbacks/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/constraints/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/datasets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/datasets/boston_housing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/datasets/cifar10/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/datasets/cifar100/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/datasets/fashion_mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/datasets/imdb/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/datasets/mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/datasets/reuters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/initializers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/layers/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/layers/experimental/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/losses/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/metrics/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/mixed_precision/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/models/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/optimizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/optimizers/schedules/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/premade/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/preprocessing/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/preprocessing/sequence/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/preprocessing/text/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/regularizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/wrappers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v1/keras/wrappers/scikit_learn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/activations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/densenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/efficientnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/imagenet_utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/inception_resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/inception_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/mobilenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/mobilenet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/mobilenet_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/nasnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/resnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/resnet50/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/vgg16/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/vgg19/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/applications/xception/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/backend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/callbacks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/callbacks/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/constraints/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/datasets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/datasets/boston_housing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/datasets/cifar10/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/datasets/cifar100/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/datasets/fashion_mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/datasets/imdb/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/datasets/mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/datasets/reuters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/initializers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/layers/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/layers/experimental/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/losses/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/metrics/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/mixed_precision/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/models/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/optimizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/optimizers/schedules/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/premade/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/preprocessing/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/preprocessing/sequence/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/preprocessing/text/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/regularizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/utils/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/wrappers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/_v2/keras/wrappers/scikit_learn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/activations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/densenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/efficientnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/imagenet_utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/inception_resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/inception_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/mobilenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/mobilenet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/mobilenet_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/nasnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/resnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/resnet50/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/vgg16/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/vgg19/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/applications/xception/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/backend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/callbacks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/callbacks/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/constraints/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/datasets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/datasets/boston_housing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/datasets/cifar10/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/datasets/cifar100/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/datasets/fashion_mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/datasets/imdb/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/datasets/mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/datasets/reuters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/initializers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/layers/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/layers/experimental/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/losses/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/metrics/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/mixed_precision/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/models/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/optimizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/optimizers/schedules/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/premade/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/preprocessing/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/preprocessing/sequence/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/preprocessing/text/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/regularizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/wrappers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/api/keras/wrappers/scikit_learn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/densenet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/efficientnet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/imagenet_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/inception_resnet_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/inception_v3.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/mobilenet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/mobilenet_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/mobilenet_v3.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/nasnet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/resnet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/resnet_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/vgg16.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/vgg19.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/applications/xception.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/backend.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/backend_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/callbacks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/callbacks_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/constraints.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/datasets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/datasets/boston_housing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/datasets/cifar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/datasets/cifar10.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/datasets/cifar100.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/datasets/fashion_mnist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/datasets/imdb.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/datasets/mnist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/datasets/reuters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/distribute_strategy_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/distributed_file_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/distributed_training_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/distributed_training_utils_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/keras_correctness_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/keras_dnn_correctness_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/keras_embedding_model_correctness_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/keras_image_model_correctness_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/keras_rnn_model_correctness_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/keras_stateful_lstm_model_correctness_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/keras_utils_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/model_collection_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/model_combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/multi_worker_testing_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/optimizer_combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/saved_model_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/sidecar_evaluator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/simple_models.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/strategy_combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/test_example.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/distribute/worker_training_state.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/base_layer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/base_layer_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/base_layer_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/base_preprocessing_layer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/compile_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/data_adapter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/functional.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/input_layer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/input_spec.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/keras_tensor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/node.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/partial_batch_padding_handler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/saving.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/sequential.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/training.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/training_arrays_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/training_distributed_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/training_eager_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/training_generator_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/training_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/training_utils_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/engine/training_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/feature_column/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/feature_column/base_feature_layer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/feature_column/dense_features.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/feature_column/dense_features_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/feature_column/sequence_feature_column.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/initializers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/initializers/initializers_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/initializers/initializers_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/keras_parameterized.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/advanced_activations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/convolutional.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/convolutional_recurrent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/cudnn_recurrent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/dense_attention.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/einsum_dense.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/embeddings.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/kernelized.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/legacy_rnn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/legacy_rnn/rnn_cell_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/legacy_rnn/rnn_cell_wrapper_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/local.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/merge.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/multi_head_attention.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/noise.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/normalization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/normalization_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/pooling.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/benchmarks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/benchmarks/feature_column_benchmark.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/category_crossing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/category_encoding.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/discretization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/hashing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/image_preprocessing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/index_lookup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/integer_lookup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/normalization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/preprocessing_stage.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/preprocessing_test_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/reduction.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/string_lookup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/table_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/preprocessing/text_vectorization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/recurrent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/recurrent_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/rnn_cell_wrapper_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/layers/wrappers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/legacy_tf_layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/legacy_tf_layers/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/legacy_tf_layers/convolutional.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/legacy_tf_layers/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/legacy_tf_layers/normalization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/legacy_tf_layers/pooling.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/losses.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/metrics.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/mixed_precision/autocast_variable.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/mixed_precision/device_compatibility_check.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/mixed_precision/get_layer_policy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/mixed_precision/loss_scale.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/mixed_precision/loss_scale_optimizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/mixed_precision/policy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/mixed_precision/test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/models.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/adadelta.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/adagrad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/adam.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/adamax.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/ftrl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/gradient_descent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/learning_rate_schedule.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/legacy_learning_rate_decay.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/nadam.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/optimizer_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/rmsprop.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizer_v2/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/optimizers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/premade/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/premade/linear.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/premade/wide_deep.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/preprocessing/dataset_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/preprocessing/image.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/preprocessing/image_dataset.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/preprocessing/sequence.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/preprocessing/text.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/preprocessing/text_dataset.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/preprocessing/timeseries.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/protobuf/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/protobuf/projector_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/protobuf/saved_metadata_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/protobuf/versions_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/regularizers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/hdf5_format.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/model_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/save.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/base_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/json_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/layer_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/load.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/load_context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/metric_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/model_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/network_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/save.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/save_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/serialized_attributes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saved_model_experimental.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/saving_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/utils_v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/utils_v1/export_output.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/utils_v1/export_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/utils_v1/mode_keys.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/utils_v1/signature_def_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/saving/utils_v1/unexported_constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/testing_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/tests/model_architectures.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/tests/model_subclassing_test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/type/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/type/types.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/all_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/control_flow_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/conv_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/data_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/dataset_creator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/generic_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/io_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/kernelized_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/kpl_test_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/layer_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/losses_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/metrics_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/mode_keys.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/multi_gpu_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/np_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/object_identity.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/tf_contextlib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/tf_inspect.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/tf_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/version_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/utils/vis_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/wrappers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras/wrappers/scikit_learn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_nightly-2.5.0.dev2021032900.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_nightly-2.5.0.dev2021032900.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_nightly-2.5.0.dev2021032900.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_nightly-2.5.0.dev2021032900.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_nightly-2.5.0.dev2021032900.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/image/affine_transformations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/image/dataframe_iterator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/image/directory_iterator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/image/image_data_generator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/image/iterator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/image/numpy_array_iterator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/image/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/sequence.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/keras_preprocessing/text.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/kiwisolver-1.3.1.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/kiwisolver-1.3.1.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/kiwisolver-1.3.1.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/kiwisolver-1.3.1.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/kiwisolver-1.3.1.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/kiwisolver-1.3.1.dist-info/top_level.txt" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/kiwisolver.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/__main__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/__meta__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/blockparser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/blockprocessors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/abbr.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/admonition.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/attr_list.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/codehilite.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/def_list.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/extra.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/fenced_code.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/footnotes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/legacy_attrs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/legacy_em.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/md_in_html.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/meta.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/nl2br.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/sane_lists.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/smarty.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/tables.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/toc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/extensions/wikilinks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/htmlparser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/inlinepatterns.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/pep562.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/postprocessors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/preprocessors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/serializers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/test_tools.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/treeprocessors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/markdown/util.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2-py3.7-nspkg.pth" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE_AMSFONTS" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE_BAKOMA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE_CARLOGO" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE_COLORBREWER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE_JSXTOOLS_RESIZE_OBSERVER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE_QHULL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE_QT4_EDITOR" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE_SOLARIZED" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE_STIX" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/LICENSE_YORICK" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/REQUESTED" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/namespace_packages.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib-3.4.2.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_animation_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_api/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_api/deprecation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_c_internal_utils.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_cm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_cm_listed.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_color_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_constrained_layout.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_contour.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_enums.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_image.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_internal_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_layoutgrid.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_mathtext.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_mathtext_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_path.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_pylab_helpers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_qhull.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_text_layout.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_tri.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_ttconv.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/_version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/afm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/animation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/artist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/axes/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/axes/_axes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/axes/_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/axes/_secondary_axes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/axes/_subplots.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/axis.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backend_bases.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backend_managers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backend_tools.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/_backend_agg.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/_backend_pdf_ps.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/_backend_tk.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/_macosx.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/_tkagg.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_agg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_cairo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_gtk3.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_gtk3agg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_gtk3cairo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_macosx.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_mixed.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_nbagg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_pdf.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_pgf.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_ps.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_qt4.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_qt4agg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_qt4cairo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_qt5.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_qt5agg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_qt5cairo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_svg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_template.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_tkagg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_tkcairo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_webagg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_webagg_core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_wx.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_wxagg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/backend_wxcairo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/qt_compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/qt_editor/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/qt_editor/_formlayout.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/qt_editor/_formsubplottool.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/qt_editor/figureoptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/qt_editor/formsubplottool.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/.eslintrc.js" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/.prettierignore" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/.prettierrc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/all_figures.html" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/css/boilerplate.css" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/css/fbm.css" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/css/mpl.css" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/css/page.css" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/ipython_inline_figure.html" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/js/mpl.js" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/js/mpl_tornado.js" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/js/nbagg_mpl.js" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/nbagg_uat.ipynb" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/package.json" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/backends/web_backend/single_figure.html" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/bezier.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/blocking_input.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/category.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/cbook/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/cbook/deprecation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/cm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/collections.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/colorbar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/colors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/container.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/contour.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/dates.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/docstring.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/dviread.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/figure.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/font_manager.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/fontconfig_pattern.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/ft2font.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/gridspec.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/hatch.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/image.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/legend.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/legend_handler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/lines.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/markers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mathtext.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mlab.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/cmex10.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/cmmi10.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/cmr10.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/cmsy10.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/cmtt10.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pagd8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pagdo8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pagk8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pagko8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pbkd8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pbkdi8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pbkl8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pbkli8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pcrb8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pcrbo8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pcrr8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pcrro8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/phvb8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/phvb8an.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/phvbo8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/phvbo8an.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/phvl8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/phvlo8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/phvr8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/phvr8an.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/phvro8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/phvro8an.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pncb8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pncbi8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pncr8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pncri8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pplb8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pplbi8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pplr8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pplri8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/psyr.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/ptmb8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/ptmbi8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/ptmr8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/ptmri8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/putb8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/putbi8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/putr8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/putri8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pzcmi8a.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm/pzdr.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Courier-Bold.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Courier-BoldOblique.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Courier-Oblique.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Courier.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica-Bold.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica-BoldOblique.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica-Oblique.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Symbol.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Times-Bold.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Times-BoldItalic.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Times-Italic.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/Times-Roman.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/ZapfDingbats.afm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts/readme.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSans-Bold.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSans-BoldOblique.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSans-Oblique.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSans.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSansDisplay.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSansMono-Bold.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSansMono-BoldOblique.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSansMono-Oblique.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSansMono.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSerif-Bold.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSerif-BoldItalic.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSerif-Italic.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSerif.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSerifDisplay.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/LICENSE_DEJAVU" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/LICENSE_STIX" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXGeneral.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXGeneralBol.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXGeneralBolIta.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXGeneralItalic.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXNonUni.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXNonUniBol.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXNonUniBolIta.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXNonUniIta.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXSizFiveSymReg.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXSizFourSymBol.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXSizFourSymReg.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXSizOneSymBol.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXSizOneSymReg.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXSizThreeSymBol.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXSizThreeSymReg.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXSizTwoSymBol.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/STIXSizTwoSymReg.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/cmb10.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/cmex10.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/cmmi10.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/cmr10.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/cmss10.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/cmsy10.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/cmtt10.ttf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/back-symbolic.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/back.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/back.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/back.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/back_large.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/filesave-symbolic.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/filesave.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/filesave.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/filesave.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/filesave_large.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/forward-symbolic.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/forward.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/forward.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/forward.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/forward_large.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/hand.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/hand.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/hand.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/help-symbolic.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/help.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/help.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/help.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/help_large.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/home-symbolic.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/home.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/home.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/home.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/home_large.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/matplotlib.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/matplotlib.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/matplotlib.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/matplotlib_128.ppm" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/matplotlib_large.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/move-symbolic.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/move.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/move.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/move.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/move_large.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/qt4_editor_options.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/qt4_editor_options.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/qt4_editor_options.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/qt4_editor_options_large.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/subplots-symbolic.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/subplots.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/subplots.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/subplots.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/subplots_large.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/zoom_to_rect-symbolic.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/zoom_to_rect.pdf" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/zoom_to_rect.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/zoom_to_rect.svg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/images/zoom_to_rect_large.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/matplotlibrc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/plot_directive/plot_directive.css" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/Minduka_Present_Blue_Pack.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/README.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/axes_grid/bivariate_normal.npy" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/data_x_x2_x3.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/eeg.dat" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/embedding_in_wx3.xrc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/goog.npz" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/grace_hopper.jpg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/jacksboro_fault_dem.npz" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/logo2.png" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/membrane.dat" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/msft.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/percent_bachelors_degrees_women_usa.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/s1045.ima.gz" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/sample_data/topobathy.npz" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/Solarize_Light2.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/_classic_test_patch.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/bmh.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/classic.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/dark_background.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/fast.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/fivethirtyeight.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/ggplot.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/grayscale.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-bright.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-colorblind.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-dark-palette.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-dark.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-darkgrid.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-deep.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-muted.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-notebook.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-paper.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-pastel.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-poster.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-talk.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-ticks.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-white.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn-whitegrid.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/seaborn.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/tableau-colorblind10.mplstyle" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/offsetbox.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/patches.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/path.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/patheffects.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/projections/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/projections/geo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/projections/polar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/pylab.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/pyplot.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/quiver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/rcsetup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/sankey.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/scale.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/sphinxext/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/sphinxext/mathmpl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/sphinxext/plot_directive.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/spines.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/stackplot.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/streamplot.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/style/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/style/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/table.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/compare.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/conftest.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/decorators.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/jpl_units/Duration.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/jpl_units/Epoch.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/jpl_units/EpochConverter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/jpl_units/StrConverter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/jpl_units/UnitDbl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/jpl_units/UnitDblConverter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/jpl_units/UnitDblFormatter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/jpl_units/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/testing/widgets.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/conftest.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_afm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_agg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_agg_filter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_animation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_arrow_patches.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_artist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_axes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_bases.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_cairo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_gtk3.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_nbagg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_pdf.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_pgf.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_ps.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_qt.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_svg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_tk.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_tools.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backend_webagg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_backends_interactive.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_basic.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_bbox_tight.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_category.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_cbook.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_collections.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_colorbar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_colors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_compare_images.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_constrainedlayout.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_container.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_contour.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_cycles.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_dates.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_determinism.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_dviread.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_figure.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_font_manager.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_fontconfig_pattern.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_gridspec.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_image.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_legend.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_lines.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_marker.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_mathtext.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_matplotlib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_mlab.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_offsetbox.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_patches.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_path.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_patheffects.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_pickle.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_png.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_polar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_preprocess_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_pyplot.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_quiver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_rcparams.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_sankey.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_scale.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_simplification.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_skew.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_sphinxext.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_spines.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_streamplot.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_style.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_subplots.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_table.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_testing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_texmanager.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_text.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_ticker.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_tightlayout.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_transforms.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_triangulation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_ttconv.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_type1font.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_units.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_usetex.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tests/test_widgets.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/texmanager.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/text.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/textpath.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/ticker.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tight_bbox.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tight_layout.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/transforms.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tri/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tri/triangulation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tri/tricontour.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tri/trifinder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tri/triinterpolate.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tri/tripcolor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tri/triplot.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tri/trirefine.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/tri/tritools.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/ttconv.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/type1font.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/units.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/matplotlib/widgets.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/anchored_artists.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/angle_helper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/axes_divider.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/axes_grid.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/axes_rgb.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/axes_size.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/axis_artist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/axisline_style.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/axislines.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/clip_path.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/floating_axes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/grid_finder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/grid_helper_curvelinear.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/inset_locator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/parasite_axes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid1/anchored_artists.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid1/axes_divider.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid1/axes_grid.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid1/axes_rgb.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid1/axes_size.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid1/inset_locator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid1/mpl_axes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid1/parasite_axes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/angle_helper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/axes_divider.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/axes_grid.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/axes_rgb.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/axis_artist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/axisline_style.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/axislines.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/clip_path.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/floating_axes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/grid_finder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/grid_helper_curvelinear.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/axisartist/parasite_axes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/mplot3d/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/mplot3d/art3d.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/mplot3d/axes3d.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/mplot3d/axis3d.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/mplot3d/proj3d.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/conftest.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/test_axes_grid.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/test_axes_grid1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/test_axisartist_angle_helper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/test_axisartist_axis_artist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/test_axisartist_axislines.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/test_axisartist_clip_path.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/test_axisartist_floating_axes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/test_axisartist_grid_finder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/test_axisartist_grid_helper_curvelinear.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/mpl_toolkits/tests/test_mplot3d.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy-1.19.5.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy-1.19.5.dist-info/LICENSE.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy-1.19.5.dist-info/LICENSES_bundled.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy-1.19.5.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy-1.19.5.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy-1.19.5.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy-1.19.5.dist-info/entry_points.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy-1.19.5.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/.dylibs/libgcc_s.1.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/.dylibs/libgfortran.3.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/.dylibs/libopenblas.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/.dylibs/libquadmath.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/LICENSE.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/__config__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/__init__.cython-30.pxd" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/__init__.pxd" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/_distributor_init.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/_globals.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/_pytesttester.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/compat/_inspect.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/compat/py3k.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/compat/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/compat/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/compat/tests/test_compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/conftest.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_add_newdocs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_asarray.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_dtype.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_dtype_ctypes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_internal.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_methods.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_multiarray_tests.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_multiarray_umath.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_operand_flag_tests.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_rational_tests.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_string_helpers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_struct_ufunc_tests.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_type_aliases.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_ufunc_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/_umath_tests.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/arrayprint.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/cversions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/defchararray.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/einsumfunc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/fromnumeric.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/function_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/generate_numpy_api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/getlimits.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/__multiarray_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/__ufunc_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/_neighborhood_iterator_imp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/_numpyconfig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/arrayobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/arrayscalars.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/halffloat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/multiarray_api.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/ndarrayobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/ndarraytypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/noprefix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/npy_1_7_deprecated_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/npy_3kcompat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/npy_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/npy_cpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/npy_endian.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/npy_interrupt.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/npy_math.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/npy_no_deprecated_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/npy_os.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/numpyconfig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/old_defines.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/oldnumeric.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/random/bitgen.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/random/distributions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/ufunc_api.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/ufuncobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/include/numpy/utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/lib/libnpymath.a" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/lib/npy-pkg-config/mlib.ini" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/lib/npy-pkg-config/npymath.ini" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/machar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/memmap.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/multiarray.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/numeric.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/numerictypes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/overrides.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/records.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/setup_common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/shape_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/_locales.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/data/astype_copy.pkl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/data/recarray_from_file.fits" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/data/umath-validation-set-README" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/data/umath-validation-set-cos" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/data/umath-validation-set-exp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/data/umath-validation-set-log" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/data/umath-validation-set-sin" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test__exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_abc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_arrayprint.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_conversion_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_cpu_features.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_datetime.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_defchararray.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_deprecations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_dtype.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_einsum.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_errstate.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_extint128.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_function_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_getlimits.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_half.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_indexerrors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_indexing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_item_selection.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_longdouble.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_machar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_mem_overlap.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_memmap.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_multiarray.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_nditer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_numeric.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_numerictypes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_overrides.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_print.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_protocols.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_records.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_regression.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_scalar_ctors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_scalar_methods.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_scalarbuffer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_scalarinherit.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_scalarmath.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_scalarprint.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_shape_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_ufunc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_umath.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_umath_accuracy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_umath_complex.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/tests/test_unicode.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/umath.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/core/umath_tests.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ctypeslib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/__config__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/_shell_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/ccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/autodist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/bdist_rpm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/build.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/build_clib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/build_ext.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/build_py.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/build_scripts.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/build_src.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/config_compiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/develop.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/egg_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/install.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/install_clib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/install_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/install_headers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/command/sdist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/conv_template.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/cpuinfo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/exec_command.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/extension.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/absoft.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/compaq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/environment.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/g95.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/gnu.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/hpux.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/ibm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/intel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/lahey.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/mips.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/nag.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/none.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/nv.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/pathf95.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/pg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/sun.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/fcompiler/vast.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/from_template.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/intelccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/lib2def.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/line_endings.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/log.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/mingw/gfortran_vs2003_hack.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/mingw32ccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/misc_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/msvc9compiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/msvccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/npy_pkg_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/numpy_distribution.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/pathccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/system_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_exec_command.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_fcompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_fcompiler_gnu.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_fcompiler_intel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_fcompiler_nagfor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_from_template.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_mingw32ccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_misc_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_npy_pkg_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_shell_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/tests/test_system_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/distutils/unixccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/basics.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/broadcasting.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/byteswapping.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/creation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/dispatch.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/glossary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/indexing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/internals.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/misc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/structured_arrays.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/subclassing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/doc/ufuncs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/dual.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/__main__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/__version__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/auxfuncs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/capi_maps.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/cb_rules.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/cfuncs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/common_rules.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/crackfortran.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/diagnose.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/f2py2e.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/f2py_testing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/f90mod_rules.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/func2subr.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/rules.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/src/fortranobject.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/src/fortranobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/array_from_pyobj/wrapmodule.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/assumed_shape/.f2py_f2cmap" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/assumed_shape/foo_free.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/assumed_shape/foo_mod.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/assumed_shape/foo_use.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/assumed_shape/precision.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/common/block.f" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/kind/foo.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/mixed/foo.f" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/mixed/foo_fixed.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/mixed/foo_free.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/parameter/constant_both.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/parameter/constant_compound.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/parameter/constant_integer.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/parameter/constant_non_compound.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/parameter/constant_real.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/regression/inout.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/size/foo.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/src/string/char.f90" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_array_from_pyobj.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_assumed_shape.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_block_docstring.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_callback.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_compile_function.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_crackfortran.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_kind.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_mixed.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_parameter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_quoted_character.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_regression.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_return_character.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_return_complex.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_return_integer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_return_logical.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_return_real.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_semicolon_split.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_size.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/test_string.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/tests/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/f2py/use_rules.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/fft/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/fft/_pocketfft.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/fft/_pocketfft_internal.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/fft/helper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/fft/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/fft/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/fft/tests/test_helper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/fft/tests/test_pocketfft.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/_datasource.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/_iotools.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/_version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/arraypad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/arraysetops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/arrayterator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/financial.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/format.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/function_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/histograms.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/index_tricks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/mixins.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/nanfunctions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/npyio.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/polynomial.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/recfunctions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/scimath.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/shape_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/stride_tricks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/data/py2-objarr.npy" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/data/py2-objarr.npz" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/data/py3-objarr.npy" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/data/py3-objarr.npz" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/data/python3.npy" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/data/win64python2.npy" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test__datasource.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test__iotools.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test__version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_arraypad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_arraysetops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_arrayterator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_financial.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_format.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_function_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_histograms.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_index_tricks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_io.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_mixins.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_nanfunctions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_packbits.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_polynomial.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_recfunctions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_regression.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_shape_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_stride_tricks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_twodim_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_type_check.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_ufunclike.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/tests/test_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/twodim_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/type_check.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/ufunclike.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/user_array.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/lib/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/linalg/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/linalg/_umath_linalg.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/linalg/lapack_lite.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/linalg/linalg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/linalg/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/linalg/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/linalg/tests/test_build.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/linalg/tests/test_deprecations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/linalg/tests/test_linalg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/linalg/tests/test_regression.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/bench.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/extras.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/mrecords.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/tests/test_core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/tests/test_deprecations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/tests/test_extras.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/tests/test_mrecords.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/tests/test_old_ma.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/tests/test_regression.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/tests/test_subclassing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/testutils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/ma/timer_comparison.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matlib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/defmatrix.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/tests/test_defmatrix.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/tests/test_interaction.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/tests/test_masked_matrix.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/tests/test_matrix_linalg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/tests/test_multiarray.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/tests/test_numeric.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/matrixlib/tests/test_regression.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/_polybase.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/chebyshev.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/hermite.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/hermite_e.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/laguerre.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/legendre.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/polynomial.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/polyutils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/tests/test_chebyshev.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/tests/test_classes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/tests/test_hermite.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/tests/test_hermite_e.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/tests/test_laguerre.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/tests/test_legendre.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/tests/test_polynomial.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/tests/test_polyutils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/polynomial/tests/test_printing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/__init__.pxd" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_bounded_integers.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_bounded_integers.pxd" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_common.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_common.pxd" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_examples/cffi/extending.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_examples/cffi/parse.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_examples/cython/extending.pyx" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_examples/cython/extending_distributions.pyx" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_examples/cython/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_examples/numba/extending.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_examples/numba/extending_distributions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_generator.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_mt19937.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_pcg64.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_philox.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_pickle.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/_sfc64.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/bit_generator.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/bit_generator.pxd" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/c_distributions.pxd" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/lib/libnpyrandom.a" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/mtrand.cpython-37m-darwin.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/data/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/data/mt19937-testset-1.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/data/mt19937-testset-2.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/data/pcg64-testset-1.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/data/pcg64-testset-2.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/data/philox-testset-1.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/data/philox-testset-2.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/data/sfc64-testset-1.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/data/sfc64-testset-2.csv" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/test_direct.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/test_extending.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/test_generator_mt19937.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/test_generator_mt19937_regressions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/test_random.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/test_randomstate.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/test_randomstate_regression.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/test_regression.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/test_seed_sequence.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/random/tests/test_smoke.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/_private/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/_private/decorators.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/_private/noseclasses.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/_private/nosetester.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/_private/parameterized.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/_private/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/print_coercion_tables.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/tests/test_decorators.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/tests/test_doctesting.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/tests/test_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/testing/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/tests/test_ctypeslib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/tests/test_matlib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/tests/test_numpy_version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/tests/test_public_api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/tests/test_reloading.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/tests/test_scripts.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/tests/test_warnings.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/numpy/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib-3.1.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib-3.1.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib-3.1.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib-3.1.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib-3.1.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib-3.1.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/endpoints/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/endpoints/access_token.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/endpoints/authorization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/endpoints/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/endpoints/request_token.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/endpoints/resource.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/endpoints/signature_only.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/parameters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/request_validator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/signature.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth1/rfc5849/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/clients/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/clients/backend_application.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/clients/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/clients/legacy_application.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/clients/mobile_application.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/clients/service_application.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/clients/web_application.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/endpoints/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/endpoints/authorization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/endpoints/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/endpoints/introspect.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/endpoints/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/endpoints/pre_configured.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/endpoints/resource.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/endpoints/revocation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/endpoints/token.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/grant_types/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/grant_types/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/grant_types/client_credentials.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/grant_types/implicit.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/parameters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/request_validator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/tokens.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/oauth2/rfc6749/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/endpoints/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/endpoints/pre_configured.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/endpoints/userinfo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/grant_types/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/grant_types/authorization_code.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/grant_types/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/grant_types/dispatchers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/grant_types/exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/grant_types/hybrid.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/grant_types/implicit.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/request_validator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/openid/connect/core/tokens.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/signals.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/oauthlib/uri_validate.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opencv_python-4.5.2.52.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opencv_python-4.5.2.52.dist-info/LICENSE-3RD-PARTY.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opencv_python-4.5.2.52.dist-info/LICENSE.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opencv_python-4.5.2.52.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opencv_python-4.5.2.52.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opencv_python-4.5.2.52.dist-info/REQUESTED" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opencv_python-4.5.2.52.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opencv_python-4.5.2.52.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum-3.3.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum-3.3.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum-3.3.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum-3.3.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum-3.3.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum-3.3.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum-3.3.0.dist-info/zip-safe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/_version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/backends/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/backends/cupy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/backends/dispatch.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/backends/jax.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/backends/object_arrays.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/backends/tensorflow.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/backends/theano.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/backends/torch.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/blas.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/contract.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/helpers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/parser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/path_random.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/paths.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/sharing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/tests/test_backends.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/tests/test_blas.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/tests/test_contract.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/tests/test_edge_cases.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/tests/test_input.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/tests/test_paths.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/opt_einsum/tests/test_sharing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/augment/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/augment/errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/augment/import_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/augment/import_utils_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/augment/inline.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/augment/inline_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/augment/rename.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/augment/rename_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/annotate.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/annotate_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/ast_constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/ast_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/ast_utils_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/codegen.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/codegen_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/formatting.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/fstring_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/scope.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/scope_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/test_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/test_utils_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pasta/base/token_generator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip-21.1.1.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip-21.1.1.dist-info/LICENSE.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip-21.1.1.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip-21.1.1.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip-21.1.1.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip-21.1.1.dist-info/entry_points.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip-21.1.1.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/__main__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/build_env.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cache.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/base_command.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/command_context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/main.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/parser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/progress_bars.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/req_command.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/spinners.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/cache.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/check.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/completion.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/configuration.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/debug.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/download.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/freeze.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/hash.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/help.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/install.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/list.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/search.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/show.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/commands/wheel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/configuration.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/distributions/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/distributions/installed.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/distributions/sdist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/index/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/index/collector.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/index/package_finder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/index/sources.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/locations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/locations/_distutils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/locations/_sysconfig.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/locations/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/main.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/metadata/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/metadata/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/metadata/pkg_resources.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/candidate.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/direct_url.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/format_control.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/index.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/link.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/scheme.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/search_scope.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/target_python.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/models/wheel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/network/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/network/auth.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/network/cache.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/network/download.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/network/lazy_wheel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/network/session.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/network/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/network/xmlrpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/build/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/build/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/build/metadata_legacy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/build/wheel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/build/wheel_legacy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/check.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/freeze.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/install/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/install/editable_legacy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/install/legacy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/install/wheel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/operations/prepare.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/pyproject.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/req/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/req/constructors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/req/req_file.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/req/req_install.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/req/req_set.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/legacy/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/legacy/resolver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/candidates.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/factory.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/provider.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/reporter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/requirements.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/resolver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/self_outdated_check.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/compatibility_tags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/datetime.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/direct_url_helpers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/distutils_args.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/encoding.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/entrypoints.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/filetypes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/glibc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/hashes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/inject_securetransport.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/logging.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/misc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/models.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/packaging.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/parallel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/pkg_resources.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/subprocess.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/unpacking.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/urls.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/utils/wheel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/vcs/git.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_internal/wheel_builder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/appdirs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/_cmd.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/adapter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/cache.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/controller.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/heuristics.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/serialize.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/cachecontrol/wrapper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/certifi/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/certifi/__main__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/certifi/cacert.pem" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/certifi/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/big5freq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/big5prober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/chardistribution.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/charsetprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/cli/chardetect.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/codingstatemachine.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/cp949prober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/enums.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/escprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/escsm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/eucjpprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/euckrfreq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/euckrprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/euctwfreq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/euctwprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312freq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312prober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/hebrewprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/jisfreq.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/jpcntx.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/langgreekmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/langrussianmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/langthaimodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/langturkishmodel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/latin1prober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/mbcharsetprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/mbcssm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/metadata/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/metadata/languages.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/sjisprober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/universaldetector.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/utf8prober.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/chardet/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/colorama/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/colorama/ansi.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/colorama/ansitowin32.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/colorama/initialise.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/colorama/win32.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/colorama/winterm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/misc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/shutil.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/database.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/index.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/locators.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/manifest.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/markers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/resources.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/scripts.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/t32.exe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/t64.exe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/w32.exe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/w64.exe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distlib/wheel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/distro.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/_ihatexml.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/_inputstream.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/_tokenizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/py.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/lint.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/whitespace.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/html5parser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/serializer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/dom.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/idna/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/idna/codec.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/idna/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/idna/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/idna/idnadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/idna/intranges.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/idna/package_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/idna/uts46data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/msgpack/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/msgpack/_version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/msgpack/exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/msgpack/ext.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/msgpack/fallback.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/__about__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/_compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/_structures.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/_typing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/markers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/requirements.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/specifiers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/tags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/packaging/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/build.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/check.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/colorlog.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/dirtools.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/envbuild.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/in_process/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/in_process/_in_process.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/meta.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pep517/wrappers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pkg_resources/py31compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/progress/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/progress/bar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/progress/counter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/progress/spinner.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/pyparsing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/__version__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/_internal_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/adapters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/auth.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/certs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/cookies.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/help.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/hooks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/models.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/packages.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/sessions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/status_codes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/structures.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/requests/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/resolvelib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/resolvelib/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/resolvelib/providers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/resolvelib/reporters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/resolvelib/resolvers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/resolvelib/structs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/six.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/_asyncio.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/after.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/before.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/before_sleep.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/nap.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/retry.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/stop.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/tornadoweb.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/tenacity/wait.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/toml/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/toml/decoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/toml/encoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/toml/ordered.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/toml/tz.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/_collections.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/_version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/connection.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/connectionpool.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/socks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/fields.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/filepost.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/six.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/poolmanager.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/request.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/response.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/connection.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/proxy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/queue.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/request.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/response.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/retry.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/ssl_.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/ssltransport.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/timeout.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/url.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/util/wait.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/vendor.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/webencodings/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/webencodings/labels.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/webencodings/mklabels.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/webencodings/tests.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/_vendor/webencodings/x_user_defined.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pip/py.typed" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/appdirs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/__about__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/_compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/_structures.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/_typing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/markers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/requirements.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/specifiers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/tags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/_vendor/pyparsing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/extern/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pkg_resources/tests/data/my-test-package-source/setup.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/protobuf-3.17.0-py3.7-nspkg.pth" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/protobuf-3.17.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/protobuf-3.17.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/protobuf-3.17.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/protobuf-3.17.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/protobuf-3.17.0.dist-info/namespace_packages.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/protobuf-3.17.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1-0.4.8.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1-0.4.8.dist-info/LICENSE.rst" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1-0.4.8.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1-0.4.8.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1-0.4.8.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1-0.4.8.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1-0.4.8.dist-info/zip-safe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/ber/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/ber/decoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/ber/encoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/ber/eoo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/cer/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/cer/decoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/cer/encoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/der/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/der/decoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/der/encoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/native/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/native/decoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/codec/native/encoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/compat/binary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/compat/calling.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/compat/dateandtime.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/compat/integer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/compat/octets.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/compat/string.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/debug.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/error.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/char.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/constraint.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/error.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/namedtype.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/namedval.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/opentype.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/tag.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/tagmap.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/univ.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1/type/useful.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules-0.2.8.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules-0.2.8.dist-info/LICENSE.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules-0.2.8.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules-0.2.8.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules-0.2.8.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules-0.2.8.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules-0.2.8.dist-info/zip-safe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/pem.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc1155.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc1157.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc1901.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc1902.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc1905.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2251.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2314.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2315.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2437.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2459.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2511.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2560.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2631.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2634.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2985.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc2986.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3114.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3161.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3274.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3279.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3280.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3281.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3412.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3414.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3447.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3560.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3565.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3709.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3770.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3779.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc3852.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc4043.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc4055.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc4073.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc4108.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc4210.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc4211.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc4334.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc4985.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5035.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5083.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5084.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5208.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5280.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5480.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5649.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5652.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5751.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5755.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5913.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5914.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5915.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5916.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5917.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5924.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5934.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5940.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5958.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc5990.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6010.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6019.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6031.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6032.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6120.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6170.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6187.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6210.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6211.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6402-1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6402.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6482.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6486.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6487.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6664.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6955.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc6960.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7030.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7191.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7229.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7292.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7296.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7508.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7585.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7633.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7773.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7894-1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7894.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7906.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc7914.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8017.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8018.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8103.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8209.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8226.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8358.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8360.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8398.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8410.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8418.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8419.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8479.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8494.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8520.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8619.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyasn1_modules/rfc8649.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pylab.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyparsing-2.4.7.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyparsing-2.4.7.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyparsing-2.4.7.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyparsing-2.4.7.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyparsing-2.4.7.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyparsing-2.4.7.dist-info/top_level.txt" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/pyparsing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/python_dateutil-2.8.1.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/python_dateutil-2.8.1.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/python_dateutil-2.8.1.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/python_dateutil-2.8.1.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/python_dateutil-2.8.1.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/python_dateutil-2.8.1.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/python_dateutil-2.8.1.dist-info/zip-safe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/__version__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/_internal_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/adapters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/auth.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/certs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/cookies.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/help.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/hooks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/models.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/packages.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/sessions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/status_codes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/structures.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib-1.3.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib-1.3.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib-1.3.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib-1.3.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib-1.3.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib-1.3.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/compliance_fixes/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/compliance_fixes/douban.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/compliance_fixes/facebook.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/compliance_fixes/fitbit.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/compliance_fixes/instagram.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/compliance_fixes/linkedin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/compliance_fixes/mailchimp.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/compliance_fixes/plentymarkets.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/compliance_fixes/slack.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/compliance_fixes/weibo.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/oauth1_auth.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/oauth1_session.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/oauth2_auth.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests_oauthlib/oauth2_session.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa-4.7.2.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa-4.7.2.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa-4.7.2.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa-4.7.2.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa-4.7.2.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa-4.7.2.dist-info/entry_points.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa-4.7.2.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/_compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/asn1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/cli.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/key.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/parallel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/pem.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/pkcs1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/pkcs1_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/prime.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/randnum.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/transform.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/rsa/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools-56.2.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools-56.2.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools-56.2.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools-56.2.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools-56.2.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools-56.2.0.dist-info/dependency_links.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools-56.2.0.dist-info/entry_points.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools-56.2.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_deprecation_warning.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/_msvccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/archive_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/bcppcompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/ccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/cmd.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/bdist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/bdist_dumb.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/bdist_msi.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/bdist_rpm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/bdist_wininst.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/build.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/build_clib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/build_ext.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/build_py.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/build_scripts.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/check.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/clean.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/install.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/install_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/install_egg_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/install_headers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/install_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/install_scripts.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/py37compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/register.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/sdist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/command/upload.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/cygwinccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/debug.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/dep_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/dir_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/dist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/extension.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/fancy_getopt.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/file_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/filelist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/log.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/msvc9compiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/msvccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/py35compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/py38compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/spawn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/sysconfig.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/text_file.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/unixccompiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_distutils/versionpredicate.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_imp.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/ordered_set.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/__about__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/_compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/_structures.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/_typing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/markers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/requirements.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/specifiers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/tags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/packaging/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/_vendor/pyparsing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/archive_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/build_meta.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/cli-32.exe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/cli-64.exe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/cli.exe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/alias.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/bdist_egg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/bdist_rpm.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/build_clib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/build_ext.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/build_py.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/develop.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/dist_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/easy_install.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/egg_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/install.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/install_egg_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/install_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/install_scripts.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/launcher manifest.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/py36compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/register.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/rotate.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/saveopts.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/sdist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/setopt.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/upload.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/command/upload_docs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/dep_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/depends.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/dist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/extension.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/extern/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/glob.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/gui-32.exe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/gui-64.exe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/gui.exe" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/installer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/launch.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/lib2to3_ex.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/monkey.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/msvc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/namespaces.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/package_index.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/py34compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/sandbox.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/script (dev).tmpl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/script.tmpl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/ssl_support.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/unicode_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/wheel.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/setuptools/windows_support.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/six-1.15.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/six-1.15.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/six-1.15.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/six-1.15.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/six-1.15.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/six-1.15.0.dist-info/top_level.txt" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/six.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard-2.5.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard-2.5.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard-2.5.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard-2.5.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard-2.5.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard-2.5.0.dist-info/entry_points.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard-2.5.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/bleach/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/bleach/callbacks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/bleach/encoding.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/bleach/linkifier.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/bleach/sanitizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/bleach/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/bleach/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/_ihatexml.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/_inputstream.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/_tokenizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/_trie/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/_trie/_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/_trie/datrie.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/_trie/py.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/filters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/filters/alphabeticalattributes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/filters/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/filters/inject_meta_charset.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/filters/lint.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/filters/optionaltags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/filters/sanitizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/filters/whitespace.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/html5parser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/serializer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treeadapters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treeadapters/genshi.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treeadapters/sax.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treebuilders/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treebuilders/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treebuilders/dom.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treebuilders/etree.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treebuilders/etree_lxml.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treewalkers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treewalkers/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treewalkers/dom.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treewalkers/etree.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treewalkers/etree_lxml.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/html5lib/treewalkers/genshi.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/webencodings/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/webencodings/labels.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/webencodings/mklabels.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/_vendor/webencodings/x_user_defined.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/assets.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/auth.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/application.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/empty_path_redirect.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/data_ingester.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/data_provider.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/directory_loader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/directory_watcher.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/event_accumulator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/event_file_inspector.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/event_file_loader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/event_multiplexer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/io_wrapper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/plugin_asset_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/plugin_event_accumulator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/plugin_event_multiplexer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/reservoir.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/event_processing/tag_types.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/experiment_id.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/experimental_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/http_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/json_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/path_prefix.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/process_graph.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/backend/security_validator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/allocation_description_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/api_def_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/attr_value_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/cluster_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/cost_graph_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/cpp_shape_inference_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/debug_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/event_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/function_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/graph_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/meta_graph_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/node_def_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/op_def_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/resource_handle_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/rewriter_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/saved_object_graph_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/saver_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/step_stats_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/struct_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/summary_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/tensor_description_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/tensor_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/tensor_shape_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/tfprof_log_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/trackable_object_graph_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/types_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/variable_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/verifier_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/proto/versions_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/app.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/compat/v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/error_codes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/flags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/io/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/io/gfile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/pywrap_tensorflow.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/tensor_shape.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/experimental/base_experiment.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/experimental/experiment_from_dev.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/grpc_provider.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/ingester.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/proto/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/proto/data_provider_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/proto/data_provider_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/provider.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data/server_ingester.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/data_compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/dataclass_compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/default.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/lazy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/main.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/main_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/manager.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/notebook.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugin_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/audio/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/audio/audio_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/audio/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/audio/plugin_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/audio/summary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/audio/summary_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/base_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/core/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/core/core_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/custom_scalar/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/custom_scalar/custom_scalars_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/custom_scalar/layout_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/custom_scalar/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/custom_scalar/summary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/debugger_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/debugger_v2/debug_data_multiplexer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/debugger_v2/debug_data_provider.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/debugger_v2/debugger_v2_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/distribution/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/distribution/compressor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/distribution/distributions_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/distribution/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/graph/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/graph/graph_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/graph/graphs_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/graph/keras_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/graph/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/histogram/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/histogram/histograms_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/histogram/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/histogram/plugin_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/histogram/summary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/histogram/summary_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/api_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/backend_context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/download_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/error.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/get_experiment.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/hparams_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/hparams_util_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/keras.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/list_metric_evals.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/list_session_groups.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/metrics.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/plugin_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/summary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/hparams/summary_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/image/images_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/image/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/image/plugin_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/image/summary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/image/summary_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/mesh/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/mesh/mesh_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/mesh/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/mesh/plugin_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/mesh/summary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/mesh/summary_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/metrics/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/metrics/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/metrics/metrics_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/npmi/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/npmi/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/npmi/npmi_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/npmi/plugin_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/pr_curve/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/pr_curve/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/pr_curve/plugin_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/pr_curve/pr_curves_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/pr_curve/summary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/profile_redirect/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/profile_redirect/profile_redirect_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/projector/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/projector/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/projector/projector_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/projector/projector_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/projector/tf_projector_plugin/index.js" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/projector/tf_projector_plugin/projector_binary.html" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/projector/tf_projector_plugin/projector_binary.js" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/scalar/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/scalar/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/scalar/plugin_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/scalar/scalars_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/scalar/summary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/scalar/summary_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/text/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/text/metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/text/plugin_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/text/summary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/text/summary_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/text/text_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/text_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/plugins/text_v2/text_v2_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/program.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/summary/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/summary/_output.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/summary/_tf/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/summary/_tf/summary/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/summary/_writer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/summary/v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/summary/v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/summary/writer/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/summary/writer/event_file_writer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/summary/writer/record_writer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/auth.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/dry_run_stubs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/exporter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/flags_parser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/formatters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/logdir_loader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/blob_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/blob_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/experiment_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/experiment_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/export_service_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/export_service_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/scalar_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/scalar_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/server_info_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/server_info_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/tensor_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/tensor_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/write_service_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/proto/write_service_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/server_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/upload_tracker.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/uploader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/uploader_subcommand.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/uploader/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/util/encoder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/util/grpc_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/util/lazy_tensor_creator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/util/op_evaluator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/util/platform_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/util/tb_logging.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/util/tensor_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/util/timing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard/webfiles.zip" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_data_server-0.6.1.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_data_server-0.6.1.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_data_server-0.6.1.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_data_server-0.6.1.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_data_server-0.6.1.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_data_server/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_data_server/bin/server" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit-1.8.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit-1.8.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit-1.8.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit-1.8.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit-1.8.0.dist-info/entry_points.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit-1.8.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_utils/common_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_utils/inference_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_utils/platform_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/classification_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/get_model_metadata_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/get_model_status_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/inference_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/input_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/model_management_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/model_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/model_service_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/model_service_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/predict_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/prediction_log_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/prediction_service_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/prediction_service_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/regression_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/apis/session_service_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/config/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/config/log_collector_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/config/logging_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/config/model_server_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/sources/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/sources/storage_path/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/sources/storage_path/file_system_storage_path_source_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/_vendor/tensorflow_serving/util/status_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/static/index.js" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/static/wit_tb_bin.html" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/static/wit_tb_bin.js" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/wit_plugin.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorboard_plugin_wit/wit_plugin_loader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow-2.5.0.dist-info/INSTALLER" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow-2.5.0.dist-info/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow-2.5.0.dist-info/METADATA" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow-2.5.0.dist-info/RECORD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow-2.5.0.dist-info/REQUESTED" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow-2.5.0.dist-info/WHEEL" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow-2.5.0.dist-info/entry_points.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow-2.5.0.dist-info/top_level.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/autograph/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/decorator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/dispatch/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/distribute/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/distribute/combinations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/distribute/multi_process_runner/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/eager_context/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/feature_column/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/function/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/graph_util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/monitoring/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/nest/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/saved_model/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/test/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/test/combinations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/tf2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/tracking/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__internal__/types/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/__operators__/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/audio/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/autodiff/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/autograph/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/autograph/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/bitwise/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/app/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/audio/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/autograph/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/autograph/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/bitwise/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/compat/v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/compat/v1/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/compat/v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/compat/v2/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/config/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/config/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/config/optimizer/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/config/threading/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/data/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/data/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/data/experimental/service/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/debugging/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/debugging/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/distribute/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/distribute/cluster_resolver/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/distribute/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/distributions/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/dtypes/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/errors/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/feature_column/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/gfile/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/graph_util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/initializers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/io/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/io/gfile/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/layers/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/linalg/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/linalg/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/lite/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/lite/constants/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/lite/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/lite/experimental/microfrontend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/lite/experimental/microfrontend/python/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/lite/experimental/microfrontend/python/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/lite/experimental/nn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/logging/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/lookup/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/lookup/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/losses/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/manip/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/math/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/math/special/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/metrics/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/mixed_precision/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/mlir/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/mlir/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/nest/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/nn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/nn/rnn_cell/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/profiler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/python_io/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/quantization/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/queue/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/ragged/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/random/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/random/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/raw_ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/resource_loader/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/saved_model/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/saved_model/builder/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/saved_model/constants/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/saved_model/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/saved_model/loader/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/saved_model/main_op/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/saved_model/signature_constants/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/saved_model/signature_def_utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/saved_model/tag_constants/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/saved_model/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/sets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/signal/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/sparse/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/spectral/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/strings/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/summary/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/sysconfig/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/test/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/tpu/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/tpu/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/tpu/experimental/embedding/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/train/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/train/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/train/queue_runner/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/types/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/types/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/user_ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/version/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/xla/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v1/xla/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/autograph/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/decorator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/dispatch/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/distribute/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/distribute/combinations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/distribute/multi_process_runner/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/eager_context/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/feature_column/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/function/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/graph_util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/monitoring/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/nest/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/saved_model/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/test/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/test/combinations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/tf2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/tracking/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__internal__/types/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/__operators__/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/audio/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/autodiff/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/autograph/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/autograph/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/bitwise/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/compat/v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/compat/v1/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/compat/v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/compat/v2/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/config/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/config/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/config/optimizer/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/config/threading/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/data/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/data/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/data/experimental/service/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/debugging/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/debugging/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/distribute/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/distribute/cluster_resolver/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/distribute/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/distribute/experimental/coordinator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/distribute/experimental/partitioners/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/dtypes/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/errors/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/experimental/dlpack/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/experimental/numpy/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/experimental/numpy/random/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/experimental/tensorrt/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/feature_column/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/graph_util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/io/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/io/gfile/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/linalg/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/linalg/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/lite/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/lite/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/lite/experimental/microfrontend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/lite/experimental/microfrontend/python/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/lite/experimental/microfrontend/python/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/lookup/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/lookup/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/math/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/math/special/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/mixed_precision/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/mlir/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/mlir/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/nest/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/nn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/profiler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/profiler/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/profiler/experimental/client/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/profiler/experimental/server/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/quantization/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/queue/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/ragged/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/random/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/random/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/raw_ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/saved_model/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/saved_model/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/sets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/signal/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/sparse/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/strings/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/summary/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/summary/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/sysconfig/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/test/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/tpu/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/tpu/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/tpu/experimental/embedding/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/train/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/train/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/types/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/types/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/version/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/xla/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/compat/v2/xla/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/config/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/config/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/config/optimizer/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/config/threading/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/data/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/data/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/data/experimental/service/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/debugging/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/debugging/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/distribute/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/distribute/cluster_resolver/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/distribute/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/distribute/experimental/coordinator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/distribute/experimental/partitioners/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/dtypes/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/errors/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/experimental/dlpack/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/experimental/numpy/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/experimental/numpy/random/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/experimental/tensorrt/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/feature_column/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/graph_util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/io/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/io/gfile/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/linalg/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/linalg/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/lite/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/lite/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/lite/experimental/microfrontend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/lite/experimental/microfrontend/python/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/lite/experimental/microfrontend/python/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/lookup/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/lookup/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/math/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/math/special/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/mixed_precision/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/mlir/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/mlir/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/nest/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/nn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/profiler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/profiler/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/profiler/experimental/client/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/profiler/experimental/server/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/quantization/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/queue/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/ragged/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/random/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/random/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/raw_ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/saved_model/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/saved_model/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/sets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/signal/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/sparse/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/strings/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/summary/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/summary/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/sysconfig/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/test/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/tpu/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/tpu/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/tpu/experimental/embedding/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/train/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/train/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/types/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/types/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/version/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/xla/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/_api/v2/xla/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/jit/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/jit/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/jit/ops/xla_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/jit/ops/xla_ops_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/mlir/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/mlir/tensorflow/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/mlir/tensorflow/gen_mlir_passthrough_op.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2tensorrt/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2tensorrt/_pywrap_py_utils.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2tensorrt/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2tensorrt/ops/gen_trt_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2tensorrt/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2tensorrt/utils/trt_engine_instance_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2xla/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2xla/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2xla/ops/_xla_ops.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2xla/ops/gen_xla_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2xla/python/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2xla/python/xla.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/tf2xla/tf2xla_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/xla/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/xla/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/xla/experimental/xla_sharding/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/xla/experimental/xla_sharding/xla_sharding.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/xla/python_api/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/xla/python_api/types.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/xla/python_api/xla_shape.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/xla/service/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/xla/service/hlo_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/compiler/xla/xla_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/debug/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/debug/debug_service_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/debug/debug_service_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/debug/debugger_event_metadata_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/example/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/example/example_parser_configuration_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/example/example_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/example/feature_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/allocation_description_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/api_def_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/attr_value_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/cost_graph_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/dataset_options_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/device_attributes_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/function_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/graph_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/graph_transfer_info_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/kernel_def_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/log_memory_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/model_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/node_def_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/op_def_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/reader_base_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/resource_handle_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/step_stats_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/summary_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/tensor_description_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/tensor_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/tensor_shape_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/tensor_slice_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/types_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/variable_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/framework/versions_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/grappler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/grappler/costs/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/grappler/costs/op_performance_data_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/kernels/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/kernels/boosted_trees/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/kernels/boosted_trees/boosted_trees_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/lib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/lib/core/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/lib/core/error_codes_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/profile_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/profiler_analysis_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/profiler_analysis_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/profiler_options_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/profiler_service_monitor_result_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/profiler_service_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/profiler_service_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/protobuf/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/protobuf/trace_events_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/protobuf/xplane_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/tfprof_log_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/tfprof_options_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/profiler/tfprof_output_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/bfc_memory_map_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/cluster_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/control_flow_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/debug_event_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/debug_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/device_filters_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/device_properties_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/error_codes_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/extension_type_variant_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/graph_debug_info_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/meta_graph_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/named_tensor_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/queue_runner_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/remote_tensor_handle_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/rewriter_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/saved_model_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/saved_object_graph_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/saver_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/service_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/snapshot_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/struct_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/tensor_bundle_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/tensorflow_server_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/tpu/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/tpu/compilation_result_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/tpu/dynamic_padding_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/tpu/optimization_parameters_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/tpu/topology_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/tpu/tpu_embedding_configuration_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/tpu/tpu_embedding_output_layout_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/trackable_object_graph_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/transport_options_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/protobuf/verifier_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/util/event_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/util/memmapped_file_system_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/util/saved_tensor_slice_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/core/util/test_log_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/Cholesky" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/CholmodSupport" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/Core" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/Dense" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/Eigen" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/Eigenvalues" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/Geometry" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/Householder" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/IterativeLinearSolvers" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/Jacobi" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/KLUSupport" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/LU" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/MetisSupport" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/OrderingMethods" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/PaStiXSupport" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/PardisoSupport" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/QR" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/QtAlignedMalloc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/SPQRSupport" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/SVD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/Sparse" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/SparseCholesky" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/SparseCore" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/SparseLU" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/SparseQR" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/StdDeque" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/StdList" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/StdVector" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/SuperLUSupport" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/UmfPackSupport" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Cholesky/LDLT.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Cholesky/LLT.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Cholesky/LLT_LAPACKE.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/CholmodSupport/CholmodSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/ArithmeticSequence.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Array.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/ArrayBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/ArrayWrapper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Assign.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/AssignEvaluator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Assign_MKL.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/BandMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Block.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/BooleanRedux.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/CommaInitializer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/ConditionEstimator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/CoreEvaluators.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/CoreIterators.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/CwiseBinaryOp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/CwiseNullaryOp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/CwiseTernaryOp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/CwiseUnaryOp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/CwiseUnaryView.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/DenseBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/DenseCoeffsBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/DenseStorage.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Diagonal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/DiagonalMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/DiagonalProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Dot.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/EigenBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/ForceAlignedAccess.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Fuzzy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/GeneralProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/GenericPacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/GlobalFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/IO.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/IndexedView.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Inverse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/MapBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/MathFunctionsImpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Matrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/MatrixBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/NestByValue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/NoAlias.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/NumTraits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/PartialReduxEvaluator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/PermutationMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/PlainObjectBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Product.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/ProductEvaluators.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Random.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Redux.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Ref.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Replicate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Reshaped.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/ReturnByValue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Reverse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Select.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/SelfAdjointView.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/SelfCwiseBinaryOp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Solve.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/SolveTriangular.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/SolverBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/StableNorm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/StlIterators.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Stride.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Swap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Transpose.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Transpositions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/TriangularMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/VectorBlock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/VectorwiseOp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/Visitor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AVX/Complex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AVX/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AVX/PacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AVX/TypeCasting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AVX512/Complex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AVX512/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AVX512/PacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AVX512/TypeCasting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AltiVec/Complex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AltiVec/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AltiVec/MatrixProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AltiVec/MatrixProductCommon.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AltiVec/MatrixProductMMA.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/AltiVec/PacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/CUDA/Complex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/Default/BFloat16.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/Default/ConjHelper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/Default/GenericPacketMathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/Default/GenericPacketMathFunctionsFwd.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/Default/Half.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/Default/Settings.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/Default/TypeCasting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/GPU/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/GPU/PacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/GPU/TypeCasting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/HIP/hcc/math_constants.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/MSA/Complex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/MSA/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/MSA/PacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/NEON/Complex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/NEON/GeneralBlockPanelKernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/NEON/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/NEON/PacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/NEON/TypeCasting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SSE/Complex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SSE/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SSE/PacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SSE/TypeCasting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SVE/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SVE/PacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SVE/TypeCasting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SYCL/InteropHeaders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SYCL/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SYCL/PacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SYCL/SyclMemoryModel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/SYCL/TypeCasting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/ZVector/Complex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/ZVector/MathFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/arch/ZVector/PacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/functors/AssignmentFunctors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/functors/BinaryFunctors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/functors/NullaryFunctors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/functors/StlFunctors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/functors/TernaryFunctors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/functors/UnaryFunctors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/GeneralBlockPanelKernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/GeneralMatrixMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/GeneralMatrixMatrixTriangular.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/GeneralMatrixMatrixTriangular_BLAS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/GeneralMatrixMatrix_BLAS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/GeneralMatrixVector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/GeneralMatrixVector_BLAS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/Parallelizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/SelfadjointMatrixMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/SelfadjointMatrixMatrix_BLAS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/SelfadjointMatrixVector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/SelfadjointMatrixVector_BLAS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/SelfadjointProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/SelfadjointRank2Update.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/TriangularMatrixMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/TriangularMatrixMatrix_BLAS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/TriangularMatrixVector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/TriangularMatrixVector_BLAS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/TriangularSolverMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/TriangularSolverMatrix_BLAS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/products/TriangularSolverVector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/BlasUtil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/ConfigureVectorization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/Constants.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/DisableStupidWarnings.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/ForwardDeclarations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/IndexedViewHelper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/IntegralConstant.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/MKL_support.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/Macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/Memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/Meta.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/ReenableStupidWarnings.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/ReshapedHelper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/StaticAssert.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/SymbolicIndex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Core/util/XprHelper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/ComplexEigenSolver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/ComplexSchur.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/ComplexSchur_LAPACKE.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/EigenSolver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/GeneralizedEigenSolver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/GeneralizedSelfAdjointEigenSolver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/HessenbergDecomposition.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/MatrixBaseEigenvalues.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/RealQZ.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/RealSchur.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/RealSchur_LAPACKE.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/SelfAdjointEigenSolver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/SelfAdjointEigenSolver_LAPACKE.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Eigenvalues/Tridiagonalization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/AlignedBox.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/AngleAxis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/EulerAngles.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/Homogeneous.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/Hyperplane.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/OrthoMethods.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/ParametrizedLine.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/Quaternion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/Rotation2D.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/RotationBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/Scaling.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/Transform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/Translation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/Umeyama.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Geometry/arch/Geometry_SIMD.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Householder/BlockHouseholder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Householder/Householder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Householder/HouseholderSequence.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/IterativeLinearSolvers/BasicPreconditioners.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/IterativeLinearSolvers/BiCGSTAB.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/IterativeLinearSolvers/ConjugateGradient.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/IterativeLinearSolvers/IncompleteCholesky.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/IterativeLinearSolvers/IncompleteLUT.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/IterativeLinearSolvers/IterativeSolverBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/IterativeLinearSolvers/LeastSquareConjugateGradient.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/IterativeLinearSolvers/SolveWithGuess.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/Jacobi/Jacobi.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/KLUSupport/KLUSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/LU/Determinant.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/LU/FullPivLU.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/LU/InverseImpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/LU/PartialPivLU.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/LU/PartialPivLU_LAPACKE.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/LU/arch/InverseSize4.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/MetisSupport/MetisSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/OrderingMethods/Amd.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/OrderingMethods/Eigen_Colamd.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/OrderingMethods/Ordering.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/PaStiXSupport/PaStiXSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/PardisoSupport/PardisoSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/QR/ColPivHouseholderQR.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/QR/ColPivHouseholderQR_LAPACKE.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/QR/CompleteOrthogonalDecomposition.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/QR/FullPivHouseholderQR.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/QR/HouseholderQR.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/QR/HouseholderQR_LAPACKE.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SPQRSupport/SuiteSparseQRSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SVD/BDCSVD.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SVD/JacobiSVD.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SVD/JacobiSVD_LAPACKE.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SVD/SVDBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SVD/UpperBidiagonalization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCholesky/SimplicialCholesky.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCholesky/SimplicialCholesky_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/AmbiVector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/CompressedStorage.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/ConservativeSparseSparseProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/MappedSparseMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseAssign.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseBlock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseColEtree.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseCompressedBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseCwiseBinaryOp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseCwiseUnaryOp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseDenseProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseDiagonalProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseDot.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseFuzzy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseMap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseMatrixBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparsePermutation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseRedux.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseRef.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseSelfAdjointView.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseSolverBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseSparseProductWithPruning.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseTranspose.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseTriangularView.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseUtil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseVector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/SparseView.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseCore/TriangularSolver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLUImpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_Memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_Structs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_SupernodalMatrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_Utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_column_bmod.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_column_dfs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_copy_to_ucol.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_gemm_kernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_heap_relax_snode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_kernel_bmod.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_panel_bmod.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_panel_dfs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_pivotL.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_pruneL.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseLU/SparseLU_relax_snode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SparseQR/SparseQR.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/StlSupport/StdDeque.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/StlSupport/StdList.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/StlSupport/StdVector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/StlSupport/details.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/SuperLUSupport/SuperLUSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/UmfPackSupport/UmfPackSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/misc/Image.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/misc/Kernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/misc/RealSvd2x2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/misc/blas.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/misc/lapack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/misc/lapacke.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/misc/lapacke_mangling.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/plugins/ArrayCwiseBinaryOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/plugins/ArrayCwiseUnaryOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/plugins/BlockMethods.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/plugins/CommonCwiseBinaryOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/plugins/CommonCwiseUnaryOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/plugins/IndexedViewMethods.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/plugins/MatrixCwiseBinaryOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/plugins/MatrixCwiseUnaryOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/Eigen/src/plugins/ReshapedMethods.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/algorithm/algorithm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/algorithm/container.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/attributes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/call_once.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/casts.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/const_init.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/dynamic_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/atomic_hook.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/bits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/cycleclock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/direct_mmap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/dynamic_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/endian.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/errno_saver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/exponential_biased.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/fast_type_id.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/hide_ptr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/identity.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/inline_variable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/invoke.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/low_level_alloc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/low_level_scheduling.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/per_thread_tls.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/raw_logging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/scheduling_mode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/spinlock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/spinlock_akaros.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/spinlock_linux.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/spinlock_posix.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/spinlock_wait.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/spinlock_win32.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/sysinfo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/thread_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/thread_identity.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/throw_delegate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/tsan_mutex_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/unaligned_access.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/internal/unscaledcycleclock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/log_severity.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/optimization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/policy_checks.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/port.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/base/thread_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/fixed_array.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/flat_hash_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/flat_hash_set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/inlined_vector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/compressed_tuple.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/container_memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/hash_function_defaults.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/hash_policy_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/hashtable_debug_hooks.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/hashtablez_sampler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/have_sse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/inlined_vector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/layout.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/node_hash_policy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/raw_hash_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/internal/raw_hash_set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/container/node_hash_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/address_is_readable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/demangle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/elf_mem_image.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/stacktrace_aarch64-inl.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/stacktrace_arm-inl.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/stacktrace_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/stacktrace_generic-inl.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/stacktrace_powerpc-inl.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/stacktrace_unimplemented-inl.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/stacktrace_win32-inl.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/stacktrace_x86-inl.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/symbolize.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/internal/vdso_support.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/leak_check.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/stacktrace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/symbolize.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/symbolize_darwin.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/symbolize_elf.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/symbolize_unimplemented.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/debugging/symbolize_win32.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/commandlineflag.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/declare.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/flag.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/internal/commandlineflag.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/internal/flag.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/internal/path_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/internal/private_handle_accessor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/internal/program_name.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/internal/registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/marshalling.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/reflection.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/flags/usage_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/functional/bind_front.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/functional/function_ref.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/functional/internal/front_binder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/functional/internal/function_ref.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/hash/hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/hash/internal/city.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/hash/internal/hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/memory/memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/meta/type_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/numeric/int128.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/numeric/int128_have_intrinsic.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/numeric/int128_no_intrinsic.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/status/internal/status_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/status/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/status/status_payload_printer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/ascii.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/charconv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/cord.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/escaping.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/char_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/charconv_bigint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/charconv_parse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/cord_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/escaping.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/memutil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/ostringstream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/resize_uninitialized.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/stl_type_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/str_format/arg.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/str_format/bind.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/str_format/checker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/str_format/extension.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/str_format/float_conversion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/str_format/output.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/str_format/parser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/str_join_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/str_split_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/internal/utf8.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/match.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/numbers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/str_cat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/str_format.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/str_join.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/str_replace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/str_split.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/string_view.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/strip.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/strings/substitute.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/synchronization/barrier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/synchronization/blocking_counter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/synchronization/internal/create_thread_identity.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/synchronization/internal/graphcycles.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/synchronization/internal/kernel_timeout.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/synchronization/internal/mutex_nonprod.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/synchronization/internal/per_thread_sem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/synchronization/internal/waiter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/synchronization/mutex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/synchronization/notification.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/civil_time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/clock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/include/cctz/civil_time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/include/cctz/civil_time_detail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/include/cctz/time_zone.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/include/cctz/zone_info_source.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/src/time_zone_fixed.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/src/time_zone_if.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/src/time_zone_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/src/time_zone_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/src/time_zone_libc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/src/time_zone_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/cctz/src/tzfile.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/get_current_time_chrono.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/internal/get_current_time_posix.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/time/time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/types/bad_optional_access.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/types/bad_variant_access.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/types/internal/optional.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/types/internal/span.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/types/internal/variant.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/types/optional.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/types/span.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/types/variant.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/absl/utility/utility.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/absl_py/absl/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/absl_py/absl/flags/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/absl_py/absl/logging/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/absl_py/absl/testing/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/absl_py/absl/third_party/unittest3_backport/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/arm_neon_2_x86_sse/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/astunparse_archive/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/array_list.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/array_list.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/assert.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/atomics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/atomics.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/atomics_fallback.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/atomics_gnu.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/atomics_gnu_old.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/atomics_msvc.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/byte_buf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/byte_order.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/byte_order.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/clock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/clock.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/command_line_parser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/condition_variable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/date_time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/device_random.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/encoding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/encoding.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/environment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/error.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/error.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/exports.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/hash_table.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/linked_list.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/linked_list.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/log_channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/log_formatter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/log_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/logging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/lru_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/math.cbmc.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/math.fallback.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/math.gcc_overflow.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/math.gcc_x64_asm.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/math.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/math.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/math.msvc.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/mutex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/posix/common.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/predicates.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/priority_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/private/array_list.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/private/byte_buf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/private/hash_table_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/private/lookup3.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/process.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/ring_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/ring_buffer.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/rw_lock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/stdbool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/stdint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/string.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/string.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/system_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/task_scheduler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/thread.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/uuid.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/zero.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/common/zero.inl" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/testing/aws_test_allocators.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-common/include/aws/testing/aws_test_harness.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-event-stream/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-event-stream/include/aws/event-stream/event_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-c-event-stream/include/aws/event-stream/event_stream_exports.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-checksums/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-checksums/include/aws/checksums/crc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-checksums/include/aws/checksums/crc_jni.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-checksums/include/aws/checksums/exports.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-checksums/include/aws/checksums/private/cpuid.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws-checksums/include/aws/checksums/private/crc_priv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/AmazonSerializableWebServiceRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/AmazonStreamingWebServiceRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/AmazonWebServiceRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/AmazonWebServiceResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/Aws.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/Core_EXPORTS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/Globals.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/NoResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/Region.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/SDKConfig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/Version.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/VersionConfig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/auth/AWSAuthSigner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/auth/AWSAuthSignerProvider.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/auth/AWSCredentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/auth/AWSCredentialsProvider.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/auth/AWSCredentialsProviderChain.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/auth/STSCredentialsProvider.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/client/AWSClient.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/client/AWSError.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/client/AWSErrorMarshaller.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/client/AsyncCallerContext.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/client/ClientConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/client/CoreErrors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/client/DefaultRetryStrategy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/client/RetryStrategy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/client/SpecifiedRetryableErrorsRetryStrategy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/config/AWSProfileConfigLoader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/external/cjson/cJSON.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/external/tinyxml2/tinyxml2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/HttpClient.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/HttpClientFactory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/HttpRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/HttpResponse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/HttpTypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/Scheme.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/URI.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/curl/CurlHandleContainer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/curl/CurlHttpClient.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/standard/StandardHttpRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/standard/StandardHttpResponse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/windows/IXmlHttpRequest2HttpClient.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/windows/WinConnectionPoolMgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/windows/WinHttpConnectionPoolMgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/windows/WinHttpSyncHttpClient.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/windows/WinINetConnectionPoolMgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/windows/WinINetSyncHttpClient.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/http/windows/WinSyncHttpClient.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/internal/AWSHttpResourceClient.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/monitoring/CoreMetrics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/monitoring/DefaultMonitoring.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/monitoring/HttpClientMetrics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/monitoring/MonitoringFactory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/monitoring/MonitoringInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/monitoring/MonitoringManager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/net/Net.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/net/SimpleUDP.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/platform/Android.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/platform/Environment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/platform/FileSystem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/platform/OSVersionInfo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/platform/Platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/platform/Security.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/platform/Time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/platform/refs/IXmlHttpRequest2Ref.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/ARN.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/Array.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/Cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/ConcurrentCache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/DNS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/DateTime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/EnumParseOverflowContainer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/FileSystemUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/GetTheLights.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/HashingUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/Outcome.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/ResourceManager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/StringUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/UUID.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/UnreferencedParam.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/base64/Base64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/Cipher.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/ContentCryptoMaterial.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/ContentCryptoScheme.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/CryptoBuf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/CryptoStream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/EncryptionMaterials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/Factories.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/HMAC.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/Hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/HashResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/KeyWrapAlgorithm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/MD5.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/SecureRandom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/Sha256.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/Sha256HMAC.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/bcrypt/CryptoImpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/commoncrypto/CryptoImpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/crypto/openssl/CryptoImpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/event/EventDecoderStream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/event/EventEncoderStream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/event/EventHeader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/event/EventMessage.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/event/EventStream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/event/EventStreamBuf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/event/EventStreamDecoder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/event/EventStreamEncoder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/event/EventStreamErrors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/event/EventStreamHandler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/json/JsonSerializer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/logging/AWSLogging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/logging/ConsoleLogSystem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/logging/DefaultLogSystem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/logging/FormattedLogSystem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/logging/LogLevel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/logging/LogMacros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/logging/LogSystemInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/logging/NullLogSystem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/logging/android/LogcatLogSystem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/AWSMemory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/MemorySystemInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSAllocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSDeque.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSList.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSMap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSMultiMap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSQueue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSSet.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSStack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSStreamFwd.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSString.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSStringStream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/AWSVector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/memory/stl/SimpleStringStream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/ratelimiter/DefaultRateLimiter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/ratelimiter/RateLimiterInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/stream/ConcurrentStreamBuf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/stream/PreallocatedStreamBuf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/stream/ResponseStream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/stream/SimpleStreamBuf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/threading/Executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/threading/ReaderWriterLock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/threading/Semaphore.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/threading/ThreadTask.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-core/include/aws/core/utils/xml/XmlSerializer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/S3ARN.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/S3Client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/S3Endpoint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/S3ErrorMarshaller.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/S3Errors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/S3Request.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/S3_EXPORTS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AbortIncompleteMultipartUpload.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AbortMultipartUploadRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AbortMultipartUploadResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AccelerateConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AccessControlPolicy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AccessControlTranslation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AnalyticsAndOperator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AnalyticsConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AnalyticsExportDestination.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AnalyticsFilter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AnalyticsS3BucketDestination.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/AnalyticsS3ExportFileFormat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Bucket.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/BucketAccelerateStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/BucketCannedACL.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/BucketLifecycleConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/BucketLocationConstraint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/BucketLoggingStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/BucketLogsPermission.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/BucketVersioningStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CORSConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CORSRule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CSVInput.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CSVOutput.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CloudFunctionConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CommonPrefix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CompleteMultipartUploadRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CompleteMultipartUploadResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CompletedMultipartUpload.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CompletedPart.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CompressionType.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Condition.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CopyObjectRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CopyObjectResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CopyObjectResultDetails.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CopyPartResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CreateBucketConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CreateBucketRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CreateBucketResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CreateMultipartUploadRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/CreateMultipartUploadResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DefaultRetention.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Delete.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketAnalyticsConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketCorsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketEncryptionRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketInventoryConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketLifecycleRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketMetricsConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketPolicyRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketReplicationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketTaggingRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteBucketWebsiteRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteMarkerEntry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteMarkerReplication.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteMarkerReplicationStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteObjectRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteObjectResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteObjectTaggingRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteObjectTaggingResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteObjectsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeleteObjectsResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeletePublicAccessBlockRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/DeletedObject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Destination.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/EncodingType.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Encryption.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/EncryptionConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Error.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ErrorDocument.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Event.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ExistingObjectReplication.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ExistingObjectReplicationStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ExpirationStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ExpressionType.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/FileHeaderInfo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/FilterRule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/FilterRuleName.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketAccelerateConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketAccelerateConfigurationResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketAclRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketAclResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketAnalyticsConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketAnalyticsConfigurationResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketCorsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketCorsResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketEncryptionRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketEncryptionResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketInventoryConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketInventoryConfigurationResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketLifecycleConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketLifecycleConfigurationResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketLocationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketLocationResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketLoggingRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketLoggingResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketMetricsConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketMetricsConfigurationResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketNotificationConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketNotificationConfigurationResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketPolicyRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketPolicyResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketPolicyStatusRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketPolicyStatusResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketReplicationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketReplicationResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketRequestPaymentRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketRequestPaymentResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketTaggingRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketTaggingResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketVersioningRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketVersioningResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketWebsiteRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetBucketWebsiteResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectAclRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectAclResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectLegalHoldRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectLegalHoldResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectLockConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectLockConfigurationResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectRetentionRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectRetentionResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectTaggingRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectTaggingResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectTorrentRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetObjectTorrentResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetPublicAccessBlockRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GetPublicAccessBlockResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/GlacierJobParameters.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Grant.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Grantee.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/HeadBucketRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/HeadObjectRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/HeadObjectResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/IndexDocument.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Initiator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InputSerialization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InventoryConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InventoryDestination.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InventoryEncryption.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InventoryFilter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InventoryFormat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InventoryFrequency.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InventoryIncludedObjectVersions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InventoryOptionalField.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InventoryS3BucketDestination.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/InventorySchedule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/JSONInput.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/JSONOutput.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/JSONType.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/LambdaFunctionConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/LifecycleConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/LifecycleExpiration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/LifecycleRule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/LifecycleRuleAndOperator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/LifecycleRuleFilter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListBucketAnalyticsConfigurationsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListBucketAnalyticsConfigurationsResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListBucketInventoryConfigurationsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListBucketInventoryConfigurationsResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListBucketMetricsConfigurationsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListBucketMetricsConfigurationsResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListBucketsResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListMultipartUploadsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListMultipartUploadsResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListObjectVersionsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListObjectVersionsResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListObjectsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListObjectsResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListObjectsV2Request.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListObjectsV2Result.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListPartsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ListPartsResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/LoggingEnabled.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/MFADelete.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/MFADeleteStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/MetadataDirective.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/MetadataEntry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Metrics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/MetricsAndOperator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/MetricsConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/MetricsFilter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/MetricsStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/MultipartUpload.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/NoncurrentVersionExpiration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/NoncurrentVersionTransition.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/NotificationConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/NotificationConfigurationDeprecated.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/NotificationConfigurationFilter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Object.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectCannedACL.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectIdentifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectLockConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectLockEnabled.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectLockLegalHold.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectLockLegalHoldStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectLockMode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectLockRetention.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectLockRetentionMode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectLockRule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectStorageClass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectVersion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ObjectVersionStorageClass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/OutputLocation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/OutputSerialization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Owner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/OwnerOverride.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ParquetInput.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Part.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Payer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Permission.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PolicyStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Progress.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ProgressEvent.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Protocol.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PublicAccessBlockConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketAccelerateConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketAclRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketAnalyticsConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketCorsRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketEncryptionRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketInventoryConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketLifecycleConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketLoggingRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketMetricsConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketNotificationConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketPolicyRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketReplicationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketRequestPaymentRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketTaggingRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketVersioningRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutBucketWebsiteRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectAclRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectAclResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectLegalHoldRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectLegalHoldResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectLockConfigurationRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectLockConfigurationResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectRetentionRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectRetentionResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectTaggingRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutObjectTaggingResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/PutPublicAccessBlockRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/QueueConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/QueueConfigurationDeprecated.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/QuoteFields.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RecordsEvent.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Redirect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RedirectAllRequestsTo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ReplicationConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ReplicationRule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ReplicationRuleAndOperator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ReplicationRuleFilter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ReplicationRuleStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ReplicationStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ReplicationTime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ReplicationTimeStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ReplicationTimeValue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RequestCharged.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RequestPayer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RequestPaymentConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RequestProgress.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RestoreObjectRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RestoreObjectResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RestoreRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RestoreRequestType.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/RoutingRule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Rule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/S3KeyFilter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/S3Location.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/SSEKMS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/SSES3.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ScanRange.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/SelectObjectContentHandler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/SelectObjectContentRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/SelectParameters.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ServerSideEncryption.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ServerSideEncryptionByDefault.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ServerSideEncryptionConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/ServerSideEncryptionRule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/SourceSelectionCriteria.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/SseKmsEncryptedObjects.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/SseKmsEncryptedObjectsStatus.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Stats.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/StatsEvent.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/StorageClass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/StorageClassAnalysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/StorageClassAnalysisDataExport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/StorageClassAnalysisSchemaVersion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Tag.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Tagging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/TaggingDirective.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/TargetGrant.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Tier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/TopicConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/TopicConfigurationDeprecated.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Transition.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/TransitionStorageClass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/Type.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/UploadPartCopyRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/UploadPartCopyResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/UploadPartRequest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/UploadPartResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/VersioningConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-s3/include/aws/s3/model/WebsiteConfiguration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-transfer/include/aws/transfer/TransferHandle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-transfer/include/aws/transfer/TransferManager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/aws/aws-cpp-sdk-transfer/include/aws/transfer/Transfer_EXPORTS.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/asn1/asn1_locl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/bio/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/bytestring/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/chacha/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/cipher_extra/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/conf/conf_def.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/conf/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/cpu-arm-linux.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/err/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/evp/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/aes/aes.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/aes/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/aes/key_wrap.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/aes/mode_wrappers.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/add.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/asm/x86_64-gcc.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/bn.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/bytes.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/cmp.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/ctx.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/div.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/div_extra.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/exponentiation.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/gcd.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/gcd_extra.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/generic.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/jacobi.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/montgomery.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/montgomery_inv.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/mul.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/prime.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/random.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/rsaz_exp.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/rsaz_exp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/shift.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/bn/sqrt.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/cipher/aead.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/cipher/cipher.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/cipher/e_aes.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/cipher/e_des.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/cipher/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/delocate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/des/des.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/des/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/digest/digest.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/digest/digests.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/digest/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/digest/md32_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/ec.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/ec_key.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/ec_montgomery.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/felem.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/oct.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/p224-64.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/p256-x86_64-table.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/p256-x86_64.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/p256-x86_64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/scalar.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/simple.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/simple_mul.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/util.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ec/wnaf.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ecdh/ecdh.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/ecdsa/ecdsa.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/hmac/hmac.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/md4/md4.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/md5/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/md5/md5.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/modes/cbc.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/modes/cfb.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/modes/ctr.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/modes/gcm.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/modes/gcm_nohw.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/modes/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/modes/ofb.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/modes/polyval.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/rand/ctrdrbg.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/rand/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/rand/rand.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/rand/urandom.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/rsa/blinding.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/rsa/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/rsa/padding.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/rsa/rsa.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/rsa/rsa_impl.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/self_check/self_check.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/sha/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/sha/sha1-altivec.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/sha/sha1.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/sha/sha256.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/sha/sha512.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/tls/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/fipsmodule/tls/kdf.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/hrss/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/obj/obj_dat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/pkcs7/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/pkcs8/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/poly1305/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/pool/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/x509/charmap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/x509/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/x509/vpm_int.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/x509v3/ext_dat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/x509v3/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/crypto/x509v3/pcy_int.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/aead.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/aes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/arm_arch.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/asn1.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/asn1_mac.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/asn1t.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/base64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/bio.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/blowfish.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/bn.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/buf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/bytestring.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/cast.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/chacha.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/cipher.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/cmac.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/conf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/cpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/crypto.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/curve25519.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/des.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/dh.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/digest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/dsa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/dtls1.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/e_os2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/ec.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/ec_key.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/ecdh.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/ecdsa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/engine.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/err.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/evp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/ex_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/hkdf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/hmac.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/hrss.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/is_boringssl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/lhash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/md4.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/md5.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/mem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/nid.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/obj.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/obj_mac.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/objects.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/opensslconf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/opensslv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/ossl_typ.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/pem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/pkcs12.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/pkcs7.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/pkcs8.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/poly1305.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/pool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/rand.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/rc4.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/ripemd.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/rsa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/safestack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/sha.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/siphash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/span.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/srtp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/ssl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/ssl3.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/stack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/thread.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/tls1.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/type_check.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/x509.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/x509_vfy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/include/openssl/x509v3.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/ssl/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/third_party/fiat/curve25519_32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/third_party/fiat/curve25519_64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/third_party/fiat/curve25519_tables.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/third_party/fiat/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/third_party/fiat/p256.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/third_party/fiat/p256_32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/boringssl/src/third_party/fiat/p256_64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/clog/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_googlecloudplatform_google_cloud_cpp/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/alarm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/client_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/completion_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/create_channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/create_channel_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/ext/health_check_service_server_builder_option.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/generic/async_generic_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/generic/generic_stub.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/grpc++.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/health_check_service_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/channel_argument_option.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/client_unary_call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/async_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/async_unary_call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/byte_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/call_hook.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/channel_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/client_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/client_unary_call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/completion_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/completion_queue_tag.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/config_protobuf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/core_codegen.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/core_codegen_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/create_auth_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/grpc_library.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/metadata_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/method_handler_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/proto_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/rpc_method.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/rpc_service_method.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/security/auth_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/serialization_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/server_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/server_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/service_type.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/slice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/status_code_enum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/string_ref.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/stub_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/sync_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/codegen/time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/grpc_library.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/method_handler_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/rpc_method.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/rpc_service_method.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/serialization_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/server_builder_option.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/server_builder_plugin.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/server_initializer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/service_type.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/sync_cxx11.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/impl/sync_no_cxx11.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/resource_quota.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/security/auth_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/security/auth_metadata_processor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/security/credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/security/server_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/server.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/server_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/server_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/server_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/async_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/async_unary_call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/byte_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/channel_arguments.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/slice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/status_code_enum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/string_ref.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/stub_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/sync_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc++/support/time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/byte_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/byte_buffer_reader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/census.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/compression.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/fork.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/grpc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/grpc_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/grpc_security.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/grpc_security_constants.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/atm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/atm_gcc_atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/atm_gcc_sync.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/atm_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/byte_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/byte_buffer_reader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/compression_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/connectivity_state.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/fork.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/gpr_slice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/gpr_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/grpc_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/log.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/port_platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/propagation_bits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/slice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/sync.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/sync_custom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/sync_generic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/sync_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/impl/codegen/sync_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/load_reporting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/slice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/slice_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/alloc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/atm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/atm_gcc_atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/atm_gcc_sync.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/atm_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/cpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/log.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/log_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/port_platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/string_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/sync.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/sync_custom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/sync_generic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/sync_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/sync_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/thd_id.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpc/support/workaround_list.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/alarm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/alarm_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/channel_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/client_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/completion_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/completion_queue_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/create_channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/create_channel_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/create_channel_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/create_channel_posix_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/ext/health_check_service_server_builder_option.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/generic/async_generic_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/generic/generic_stub.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/generic/generic_stub_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/grpcpp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/health_check_service_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/health_check_service_interface_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/channel_argument_option.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/client_unary_call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/async_generic_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/async_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/async_stream_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/async_unary_call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/async_unary_call_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/byte_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/call_hook.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/call_op_set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/call_op_set_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/callback_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/channel_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/client_callback.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/client_callback_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/client_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/client_context_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/client_interceptor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/client_unary_call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/completion_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/completion_queue_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/completion_queue_tag.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/config_protobuf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/core_codegen.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/core_codegen_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/create_auth_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/delegating_channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/grpc_library.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/intercepted_channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/interceptor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/interceptor_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/message_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/metadata_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/method_handler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/method_handler_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/proto_buffer_reader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/proto_buffer_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/proto_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/rpc_method.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/rpc_service_method.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/security/auth_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/serialization_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/server_callback.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/server_callback_handlers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/server_callback_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/server_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/server_context_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/server_interceptor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/server_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/service_type.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/slice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/status_code_enum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/string_ref.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/stub_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/sync.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/sync_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/sync_stream_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/codegen/time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/grpc_library.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/method_handler_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/rpc_method.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/rpc_service_method.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/serialization_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/server_builder_option.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/server_builder_option_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/server_builder_plugin.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/server_initializer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/server_initializer_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/service_type.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/sync_cxx11.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/impl/sync_no_cxx11.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/resource_quota.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/resource_quota_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/security/auth_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/security/auth_metadata_processor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/security/auth_metadata_processor_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/security/credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/security/credentials_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/security/server_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/security/server_credentials_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/security/tls_credentials_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/server.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/server_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/server_builder_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/server_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/server_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/server_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/server_posix_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/async_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/async_stream_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/async_unary_call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/async_unary_call_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/byte_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/channel_arguments.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/channel_arguments_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/client_callback.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/client_callback_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/client_interceptor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/interceptor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/message_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/method_handler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/proto_buffer_reader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/proto_buffer_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/server_callback.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/server_callback_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/server_interceptor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/slice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/status_code_enum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/string_ref.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/stub_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/sync_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/sync_stream_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/include/grpcpp/support/validate_service_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/backend_metric.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/backup_poller.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/client_channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/client_channel_channelz.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/client_channel_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/connector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/global_subchannel_pool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/health/health_check_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/http_connect_handshaker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/http_proxy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/lb_policy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/lb_policy/subchannel_list.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/lb_policy/xds/xds.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/lb_policy_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/lb_policy_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/local_subchannel_pool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/parse_address.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/proxy_mapper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/proxy_mapper_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/resolver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/resolver/dns/dns_resolver_selection.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/resolver/fake/fake_resolver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/resolver_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/resolver_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/resolver_result_parsing.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/resolving_lb_policy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/retry_throttle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/server_address.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/service_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/subchannel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/subchannel_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/subchannel_pool_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/xds/xds_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/xds/xds_bootstrap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/xds/xds_channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/xds/xds_channel_args.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/xds/xds_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/client_channel/xds/xds_client_stats.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/deadline/deadline_filter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/http/client/http_client_filter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/http/client_authority_filter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/http/message_compress/message_compress_filter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/http/server/http_server_filter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/max_age/max_age_filter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/message_size/message_size_filter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/workarounds/workaround_cronet_compression_filter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/filters/workarounds/workaround_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/alpn/alpn.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/client/authority.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/client/chttp2_connector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/server/chttp2_server.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/bin_decoder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/bin_encoder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/context_list.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/flow_control.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/frame.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/frame_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/frame_goaway.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/frame_ping.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/frame_rst_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/frame_settings.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/frame_window_update.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/hpack_encoder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/hpack_parser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/hpack_table.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/http2_settings.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/huffsyms.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/incoming_metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/stream_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/chttp2/transport/varint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/transport/inproc/inproc_transport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/auth/cert.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/cds.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/cluster/circuit_breaker.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/cluster/filter.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/cluster/outlier_detection.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/core/address.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/core/base.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/core/config_source.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/core/grpc_service.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/core/health_check.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/core/http_uri.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/core/protocol.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/discovery.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/eds.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/endpoint/endpoint.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/api/v2/endpoint/load_report.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/service/discovery/v2/ads.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/service/load_stats/v2/lrs.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/type/http.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/type/percent.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/envoy/type/range.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/gogoproto/gogo.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/google/api/annotations.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/google/api/http.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/google/protobuf/any.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/google/protobuf/descriptor.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/google/protobuf/duration.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/google/protobuf/empty.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/google/protobuf/struct.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/google/protobuf/timestamp.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/google/protobuf/wrappers.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/google/rpc/status.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/src/proto/grpc/gcp/altscontext.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/src/proto/grpc/gcp/handshaker.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/src/proto/grpc/gcp/transport_security_common.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/udpa/data/orca/v1/orca_load_report.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/ext/upb-generated/validate/validate.upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/avl/avl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/backoff/backoff.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/channel_args.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/channel_stack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/channel_stack_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/channel_trace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/channelz.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/channelz_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/connected_channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/handshaker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/handshaker_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/handshaker_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/channel/status_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/compression/algorithm_metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/compression/compression_args.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/compression/compression_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/compression/message_compress.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/compression/stream_compression.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/compression/stream_compression_gzip.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/compression/stream_compression_identity.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/debug/stats.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/debug/stats_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/debug/trace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/alloc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/arena.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/env.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/murmur_hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/spinlock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/string.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/string_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/time_precise.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/tls.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/tls_gcc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/tls_msvc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/tls_pthread.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/tmpfile.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gpr/useful.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/arena.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/debug_location.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/fork.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/global_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/global_config_custom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/global_config_env.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/global_config_generic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/host_port.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/inlined_vector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/manual_constructor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/mpscq.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/optional.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/orphanable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/ref_counted.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/ref_counted_ptr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/string_view.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/sync.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/gprpp/thd.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/http/format_request.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/http/httpcli.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/http/parser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/block_annotate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/buffer_list.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/call_combiner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/cfstream_handle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/closure.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/combiner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/dynamic_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/endpoint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/endpoint_cfstream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/endpoint_pair.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/error.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/error_cfstream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/error_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/ev_epoll1_linux.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/ev_epollex_linux.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/ev_poll_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/ev_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/exec_ctx.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/executor/mpmcqueue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/executor/threadpool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/gethostname.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/grpc_if_nametoindex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/internal_errqueue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/iocp_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/iomgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/iomgr_custom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/iomgr_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/iomgr_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/is_epollexclusive_available.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/load_file.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/lockfree_event.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/logical_thread.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/nameser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/polling_entity.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/pollset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/pollset_custom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/pollset_set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/pollset_set_custom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/pollset_set_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/pollset_uv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/pollset_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/port.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/python_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/resolve_address.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/resolve_address_custom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/resource_quota.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/sockaddr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/sockaddr_custom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/sockaddr_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/sockaddr_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/sockaddr_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/socket_factory_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/socket_mutator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/socket_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/socket_utils_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/socket_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/sys_epoll_wrapper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/tcp_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/tcp_client_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/tcp_custom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/tcp_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/tcp_server.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/tcp_server_utils_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/tcp_windows.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/time_averaged_stats.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/timer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/timer_custom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/timer_generic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/timer_heap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/timer_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/udp_server.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/unix_sockets_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/wakeup_fd_pipe.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/iomgr/wakeup_fd_posix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/json/json.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/profiling/timers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/context/security_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/alts/alts_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/alts/check_gcp_environment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/alts/grpc_alts_credentials_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/composite/composite_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/fake/fake_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/google_default/google_default_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/iam/iam_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/jwt/json_token.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/jwt/jwt_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/jwt/jwt_verifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/local/local_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/oauth2/oauth2_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/plugin/plugin_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/ssl/ssl_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/tls/grpc_tls_credentials_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/credentials/tls/tls_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/security_connector/alts/alts_security_connector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/security_connector/fake/fake_security_connector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/security_connector/load_system_roots.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/security_connector/load_system_roots_linux.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/security_connector/local/local_security_connector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/security_connector/security_connector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/security_connector/ssl/ssl_security_connector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/security_connector/ssl_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/security_connector/ssl_utils_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/security_connector/tls/tls_security_connector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/transport/auth_filters.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/transport/secure_endpoint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/transport/security_handshaker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/transport/target_authority_table.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/transport/tsi_error.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/security/util/json_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/slice/b64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/slice/percent_encoding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/slice/slice_hash_table.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/slice/slice_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/slice/slice_string_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/slice/slice_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/slice/slice_weak_hash_table.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/api_trace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/call_test_only.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/channel_init.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/channel_stack_type.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/completion_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/completion_queue_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/event_string.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/init.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/lame_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/server.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/surface/validate_metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/bdp_estimator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/byte_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/connectivity_state.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/error_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/http2_errors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/metadata_batch.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/pid_controller.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/static_metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/status_conversion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/status_metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/timeout_encoding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/transport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/transport/transport_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/lib/uri/uri_parser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/crypt/gsec.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/frame_protector/alts_counter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/frame_protector/alts_crypter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/frame_protector/alts_frame_protector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/frame_protector/frame_handler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/handshaker/alts_handshaker_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/handshaker/alts_shared_resource.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/handshaker/alts_tsi_handshaker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/handshaker/alts_tsi_handshaker_private.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/handshaker/alts_tsi_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/handshaker/transport_security_common_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/fake_transport_security.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/grpc_shadow_boringssl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/local_transport_security.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/ssl/session_cache/ssl_session.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/ssl/session_cache/ssl_session_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/ssl_transport_security.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/ssl_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/transport_security.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/transport_security_grpc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/core/tsi/transport_security_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/client/create_channel_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/client/secure_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/common/channel_filter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/common/secure_auth_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/common/tls_credentials_options_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/server/dynamic_thread_pool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/server/external_connection_acceptor_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/server/health/default_health_check_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/server/secure_server_credentials.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/server/thread_pool_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/src/cpp/thread_manager/thread_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/third_party/address_sorting/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/third_party/address_sorting/address_sorting_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_github_grpc_grpc/third_party/address_sorting/include/address_sorting/address_sorting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_google_absl/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_google_protobuf/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/bitmap256.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/filtered_re2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/prefilter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/prefilter_tree.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/prog.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/re2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/regexp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/unicode_casefold.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/unicode_groups.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/re2/walker-inl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/util/flags.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/util/logging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/util/pod_array.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/util/sparse_array.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/util/sparse_set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/util/strutil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/util/utf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/com_googlesource_code_re2/util/util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/cpuinfo/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/COPYING" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl/curl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl/curlver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl/easy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl/mprintf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl/multi.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl/options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl/stdcheaders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl/system.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl/typecheck-gcc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl/urlapi.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/include/curl_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/altsvc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/amigaos.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/arpa_telnet.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/asyn.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/c-hyper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/config-amigaos.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/config-dos.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/config-mac.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/config-os400.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/config-plan9.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/config-riscos.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/config-tpf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/config-vxworks.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/config-win32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/config-win32ce.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/conncache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/connect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/content_encoding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/cookie.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_addrinfo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_base64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_ctype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_des.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_endian.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_fnmatch.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_get_line.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_gethostname.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_gssapi.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_hmac.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_krb5.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_ldap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_md4.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_md5.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_memrchr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_multibyte.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_ntlm_core.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_ntlm_wb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_path.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_printf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_range.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_rtmp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_sasl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_setup.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_setup_once.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_sha256.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_sspi.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curl_threads.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/curlx.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/dict.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/doh.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/dotdot.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/dynbuf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/easyif.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/easyoptions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/escape.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/file.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/fileinfo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/formdata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/ftp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/ftplistparser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/getinfo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/gopher.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/hostcheck.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/hostip.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/hsts.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/http.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/http2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/http_aws_sigv4.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/http_chunks.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/http_digest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/http_negotiate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/http_ntlm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/http_proxy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/if2ip.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/imap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/inet_ntop.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/inet_pton.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/llist.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/memdebug.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/mime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/mqtt.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/multihandle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/multiif.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/netrc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/non-ascii.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/nonblock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/parsedate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/pingpong.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/pop3.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/progress.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/psl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/quic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/rand.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/rename.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/rtsp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/select.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/sendf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/setopt.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/setup-vms.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/share.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/sigpipe.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/slist.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/smb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/smtp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/sockaddr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/socketpair.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/socks.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/speedcheck.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/splay.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/strcase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/strdup.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/strerror.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/strtok.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/strtoofft.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/system_win32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/telnet.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/tftp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/timeval.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/transfer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/url.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/urlapi-int.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/urldata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vauth/digest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vauth/ntlm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vauth/vauth.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/version_win32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vquic/ngtcp2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vquic/quiche.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vquic/vquic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vssh/ssh.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/bearssl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/gskit.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/gtls.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/keylog.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/mbedtls.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/mbedtls_threadlock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/mesalink.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/nssg.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/openssl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/rustls.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/schannel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/sectransp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/vtls.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/vtls/wolfssl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/warnless.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/wildcard.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/curl/lib/x509asn1.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/dill_archive/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/dlpack/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/dlpack/include/dlpack/dlpack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/double-conversion/bignum-dtoa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/double-conversion/bignum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/double-conversion/cached-powers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/double-conversion/diy-fp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/double-conversion/double-conversion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/double-conversion/fast-dtoa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/double-conversion/fixed-dtoa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/double-conversion/ieee.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/double-conversion/strtod.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/double_conversion/double-conversion/utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/eigen_archive/COPYING.MPL2" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/enum34_archive/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/farmhash_archive/COPYING" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/farmhash_archive/src/farmhash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/fft2d/readme2d.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/flatbuffers/LICENSE.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/functools32_archive/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gast_archive/PKG-INFO" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/fixedpoint/fixedpoint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/fixedpoint/fixedpoint_avx.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/fixedpoint/fixedpoint_msa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/fixedpoint/fixedpoint_neon.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/fixedpoint/fixedpoint_sse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/block_params.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/compute.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/detect_platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/dispatch_gemm_shape.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/kernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/kernel_avx.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/kernel_default.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/kernel_msa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/kernel_neon.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/kernel_reference.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/kernel_sse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/multi_thread_gemm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/output.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/output_avx.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/output_msa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/output_neon.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/output_sse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/pack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/pack_avx.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/pack_msa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/pack_neon.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/pack_sse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/simd_wrappers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/simd_wrappers_common_neon_sse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/simd_wrappers_msa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/simd_wrappers_neon.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/simd_wrappers_sse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/single_thread_gemm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/internal/unpack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/legacy_multi_thread_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/legacy_multi_thread_gemm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/legacy_multi_thread_gemv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/legacy_operations_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/legacy_single_thread_gemm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/multi_thread_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/multi_thread_gemm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/multi_thread_transform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/quantized_mul_kernels.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/quantized_mul_kernels_arm_32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/quantized_mul_kernels_arm_64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/single_thread_gemm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/single_thread_transform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/streams.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/streams_arm_32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/streams_arm_64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/transform_kernels.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/transform_kernels_arm_32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/meta/transform_kernels_arm_64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/profiling/instrumentation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/profiling/profiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/profiling/pthread_everywhere.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/public/bit_depth.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/public/gemmlowp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/public/map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gemmlowp/public/output_stages.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gif/COPYING" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gif/gif_hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gif/gif_lib.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/gif/gif_lib_private.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/highwayhash/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/highwayhash/highwayhash/arch_specific.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/highwayhash/highwayhash/compiler_specific.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/highwayhash/highwayhash/endianess.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/highwayhash/highwayhash/sip_hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/highwayhash/highwayhash/state_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/bmpset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/brkeng.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/bytesinkutil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/capi_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/charstr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/cmemory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/cpputils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/cstr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/cstring.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/cwchar.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/dictbe.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/dictionarydata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/icuplugimp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/localsvc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/locbased.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/locmap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/locutil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/messageimpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/msvcres.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/mutex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/norm2_nfc_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/norm2allmodes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/normalizer2impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/patternprops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/pluralmap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/propname.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/propname_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/propsvec.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/punycode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/putilimp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/rbbi_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/rbbidata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/rbbinode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/rbbirb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/rbbirpt.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/rbbiscan.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/rbbisetb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/rbbitblb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/resource.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ruleiter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/serv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/servloc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/servnotf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/sharedobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/sprpimpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/static_unicode_sets.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uarrsort.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uassert.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ubidi_props.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ubidi_props_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ubidiimp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ubrkimpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucase_props_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucasemap_imp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uchar_props_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucln.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucln_cmn.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucln_imp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucmndata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucnv_bld.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucnv_cnv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucnv_ext.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucnv_imp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucnv_io.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucnvmbcs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucol_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucol_swp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucptrie_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ucurrimp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/udatamem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/udataswp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uelement.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uenumimp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uhash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uinvchar.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ulayout_props.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ulist.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ulocimp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/umapfile.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/umutex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/appendable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/brkiter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/bytestream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/bytestrie.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/bytestriebuilder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/caniter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/casemap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/char16ptr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/chariter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/dbbi.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/docmain.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/dtintrv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/edits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/enumset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/errorcode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/filteredbrk.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/icudataver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/icuplug.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/idna.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/localebuilder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/localpointer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/locdspnm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/locid.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/messagepattern.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/normalizer2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/normlzr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/parseerr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/parsepos.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ptypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/putil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/rbbi.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/rep.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/resbund.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/schriter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/simpleformatter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/std_string.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/strenum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/stringoptions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/stringpiece.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/stringtriebuilder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/symtable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ubidi.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ubiditransform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ubrk.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucasemap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uchar.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucharstrie.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucharstriebuilder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uchriter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uclean.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucnv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucnv_cb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucnv_err.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucnvsel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uconfig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucpmap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucptrie.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ucurr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/udata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/udisplaycontext.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uenum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uidna.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uiter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uldnames.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uloc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/umachine.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/umisc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/umutablecptrie.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/unifilt.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/unifunct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/unimatch.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uniset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/unistr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/unorm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/unorm2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/urename.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/urep.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ures.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uscript.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/usetiter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ushape.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/usprep.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ustring.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/ustringtrie.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/utext.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/utf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/utf16.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/utf32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/utf8.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/utf_old.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/utrace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/utypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uvernum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unicode/uversion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unifiedcache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unisetspan.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unistrappender.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/unormimp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uposixdefs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uprops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uresdata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uresimp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ureslocs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/usc_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uset_imp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ustr_cnv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ustr_imp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ustrenum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/ustrfmt.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/utracimp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/utrie.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/utrie2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/utrie2_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/utypeinfo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uvector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uvectr32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/uvectr64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/icu/icu4c/source/common/wintz.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/kissfft/COPYING" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/LICENSE.md" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jccolext.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jchuff.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jconfig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jconfigint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jdcoefct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jdcol565.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jdcolext.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jdct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jdhuff.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jdmainct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jdmaster.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jdmrg565.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jdmrgext.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jdsample.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jerror.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jinclude.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jmemsys.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jmorecfg.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jpeg_nbits_table.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jpegcomp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jpegint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jpeglib.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jsimd.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jsimddct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jstdhuff.c" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/libjpeg_turbo/jversion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/LICENSE.TXT" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AMXIncGen/mlir/Dialect/AMX/AMX.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AMXIncGen/mlir/Dialect/AMX/AMX.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AMXIncGen/mlir/Dialect/AMX/AMXDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AVX512IncGen/mlir/Dialect/AVX512/AVX512.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AVX512IncGen/mlir/Dialect/AVX512/AVX512.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AVX512IncGen/mlir/Dialect/AVX512/AVX512Dialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AffineMemoryOpInterfacesIncGen/mlir/Dialect/Affine/IR/AffineMemoryOpInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AffineMemoryOpInterfacesIncGen/mlir/Dialect/Affine/IR/AffineMemoryOpInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AffineOpsIncGen/mlir/Dialect/Affine/IR/AffineOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AffineOpsIncGen/mlir/Dialect/Affine/IR/AffineOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AffineOpsIncGen/mlir/Dialect/Affine/IR/AffineOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AffinePassIncGen/mlir/Dialect/Affine/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ArmNeonIncGen/mlir/Dialect/ArmNeon/ArmNeon.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ArmNeonIncGen/mlir/Dialect/ArmNeon/ArmNeon.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ArmNeonIncGen/mlir/Dialect/ArmNeon/ArmNeonDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ArmSVEIncGen/mlir/Dialect/ArmSVE/ArmSVE.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ArmSVEIncGen/mlir/Dialect/ArmSVE/ArmSVE.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ArmSVEIncGen/mlir/Dialect/ArmSVE/ArmSVEDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ArmSVEIncGen/mlir/Dialect/ArmSVE/ArmSVETypes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ArmSVEIncGen/mlir/Dialect/ArmSVE/ArmSVETypes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AsyncOpsIncGen/mlir/Dialect/Async/IR/AsyncOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AsyncOpsIncGen/mlir/Dialect/Async/IR/AsyncOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AsyncOpsIncGen/mlir/Dialect/Async/IR/AsyncOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AsyncOpsIncGen/mlir/Dialect/Async/IR/AsyncOpsTypes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AsyncOpsIncGen/mlir/Dialect/Async/IR/AsyncOpsTypes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/AsyncPassIncGen/mlir/Dialect/Async/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/BuiltinAttributesIncGen/mlir/IR/BuiltinAttributes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/BuiltinAttributesIncGen/mlir/IR/BuiltinAttributes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/BuiltinDialectIncGen/mlir/IR/BuiltinDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/BuiltinLocationAttributesIncGen/mlir/IR/BuiltinLocationAttributes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/BuiltinLocationAttributesIncGen/mlir/IR/BuiltinLocationAttributes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/BuiltinOpsIncGen/mlir/IR/BuiltinOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/BuiltinOpsIncGen/mlir/IR/BuiltinOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/BuiltinTypesIncGen/mlir/IR/BuiltinTypes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/BuiltinTypesIncGen/mlir/IR/BuiltinTypes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/CallOpInterfacesIncGen/mlir/Interfaces/CallInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/CallOpInterfacesIncGen/mlir/Interfaces/CallInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/CastOpInterfacesIncGen/mlir/Interfaces/CastInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/CastOpInterfacesIncGen/mlir/Interfaces/CastInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ComplexBaseIncGen/mlir/Dialect/Complex/IR/ComplexOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ComplexOpsIncGen/mlir/Dialect/Complex/IR/ComplexOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ComplexOpsIncGen/mlir/Dialect/Complex/IR/ComplexOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ControlFlowInterfacesIncGen/mlir/Interfaces/ControlFlowInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ControlFlowInterfacesIncGen/mlir/Interfaces/ControlFlowInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ConversionPassIncGen/mlir/Conversion/Passes.capi.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ConversionPassIncGen/mlir/Conversion/Passes.capi.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ConversionPassIncGen/mlir/Conversion/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/CopyOpInterfaceIncGen/mlir/Interfaces/CopyOpInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/CopyOpInterfaceIncGen/mlir/Interfaces/CopyOpInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/DLTIBaseIncGen/mlir/Dialect/DLTI/DLTIDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/DerivedAttributeOpInterfaceIncGen/mlir/Interfaces/DerivedAttributeOpInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/DerivedAttributeOpInterfaceIncGen/mlir/Interfaces/DerivedAttributeOpInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/GPUBaseIncGen/mlir/Dialect/GPU/GPUOpInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/GPUBaseIncGen/mlir/Dialect/GPU/GPUOpInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/GPUBaseIncGen/mlir/Dialect/GPU/GPUOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/GPUOpsIncGen/mlir/Dialect/GPU/GPUOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/GPUOpsIncGen/mlir/Dialect/GPU/GPUOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/GPUPassIncGen/mlir/Dialect/GPU/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/GPUToNVVMGen/GPUToNVVM.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/GPUToROCDLTGen/GPUToROCDL.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/GPUToSPIRVIncGen/GPUToSPIRV.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/InferTypeOpInterfaceIncGen/mlir/Interfaces/InferTypeOpInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/InferTypeOpInterfaceIncGen/mlir/Interfaces/InferTypeOpInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMArmSVEIncGen/mlir/Dialect/LLVMIR/LLVMArmSVE.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMArmSVEIncGen/mlir/Dialect/LLVMIR/LLVMArmSVE.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMArmSVEIncGen/mlir/Dialect/LLVMIR/LLVMArmSVEDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMConversionIncGen/mlir/Dialect/LLVMIR/LLVMConversionEnumsFromLLVM.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMConversionIncGen/mlir/Dialect/LLVMIR/LLVMConversionEnumsToLLVM.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMConversionIncGen/mlir/Dialect/LLVMIR/LLVMConversions.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMDialectAttributesIncGen/mlir/Dialect/LLVMIR/LLVMOpsAttrDefs.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMDialectAttributesIncGen/mlir/Dialect/LLVMIR/LLVMOpsAttrDefs.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMDialectInterfaceIncGen/mlir/Dialect/LLVMIR/LLVMOpsInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMDialectInterfaceIncGen/mlir/Dialect/LLVMIR/LLVMOpsInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMOpsIncGen/mlir/Dialect/LLVMIR/LLVMOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMOpsIncGen/mlir/Dialect/LLVMIR/LLVMOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMOpsIncGen/mlir/Dialect/LLVMIR/LLVMOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMOpsIncGen/mlir/Dialect/LLVMIR/LLVMOpsEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMOpsIncGen/mlir/Dialect/LLVMIR/LLVMOpsEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LLVMPassIncGen/mlir/Dialect/LLVMIR/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LinalgInterfacesIncGen/mlir/Dialect/Linalg/IR/LinalgInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LinalgInterfacesIncGen/mlir/Dialect/Linalg/IR/LinalgInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LinalgOpsIncGen/mlir/Dialect/Linalg/IR/LinalgOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LinalgOpsIncGen/mlir/Dialect/Linalg/IR/LinalgOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LinalgOpsIncGen/mlir/Dialect/Linalg/IR/LinalgOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LinalgPassIncGen/mlir/Dialect/Linalg/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LinalgSparseOpsIncGen/mlir/Dialect/Linalg/IR/LinalgSparseOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LinalgSparseOpsIncGen/mlir/Dialect/Linalg/IR/LinalgSparseOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LinalgStructuredOpsIncGen/mlir/Dialect/Linalg/IR/LinalgStructuredOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LinalgStructuredOpsIncGen/mlir/Dialect/Linalg/IR/LinalgStructuredOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LoopLikeInterfaceIncGen/mlir/Interfaces/LoopLikeInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/LoopLikeInterfaceIncGen/mlir/Interfaces/LoopLikeInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/MLIRShapeCanonicalizationIncGen/ShapeCanonicalization.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/MathBaseIncGen/mlir/Dialect/Math/IR/MathOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/MathOpsIncGen/mlir/Dialect/Math/IR/MathOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/MathOpsIncGen/mlir/Dialect/Math/IR/MathOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/MemRefBaseIncGen/mlir/Dialect/MemRef/IR/MemRefOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/MemRefOpsIncGen/mlir/Dialect/MemRef/IR/MemRefOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/MemRefOpsIncGen/mlir/Dialect/MemRef/IR/MemRefOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/NVVMConversionIncGen/mlir/Dialect/LLVMIR/NVVMConversions.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/NVVMOpsIncGen/mlir/Dialect/LLVMIR/NVVMOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/NVVMOpsIncGen/mlir/Dialect/LLVMIR/NVVMOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/NVVMOpsIncGen/mlir/Dialect/LLVMIR/NVVMOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpAsmInterfaceIncGen/mlir/IR/OpAsmInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpAsmInterfaceIncGen/mlir/IR/OpAsmInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpenACCOpsIncGen/mlir/Dialect/OpenACC/OpenACCOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpenACCOpsIncGen/mlir/Dialect/OpenACC/OpenACCOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpenACCOpsIncGen/mlir/Dialect/OpenACC/OpenACCOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpenACCOpsIncGen/mlir/Dialect/OpenACC/OpenACCOpsEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpenACCOpsIncGen/mlir/Dialect/OpenACC/OpenACCOpsEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpenMPOpsIncGen/mlir/Dialect/OpenMP/OpenMPOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpenMPOpsIncGen/mlir/Dialect/OpenMP/OpenMPOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpenMPOpsIncGen/mlir/Dialect/OpenMP/OpenMPOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpenMPOpsIncGen/mlir/Dialect/OpenMP/OpenMPOpsEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/OpenMPOpsIncGen/mlir/Dialect/OpenMP/OpenMPOpsEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/PDLInterpOpsIncGen/mlir/Dialect/PDLInterp/IR/PDLInterpOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/PDLInterpOpsIncGen/mlir/Dialect/PDLInterp/IR/PDLInterpOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/PDLInterpOpsIncGen/mlir/Dialect/PDLInterp/IR/PDLInterpOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/PDLOpsIncGen/mlir/Dialect/PDL/IR/PDLOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/PDLOpsIncGen/mlir/Dialect/PDL/IR/PDLOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/PDLOpsIncGen/mlir/Dialect/PDL/IR/PDLOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/PDLTypesIncGen/mlir/Dialect/PDL/IR/PDLOpsTypes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/PDLTypesIncGen/mlir/Dialect/PDL/IR/PDLOpsTypes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ParallelLoopMapperAttrGen/mlir/Dialect/GPU/ParallelLoopMapperAttr.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ParallelLoopMapperAttrGen/mlir/Dialect/GPU/ParallelLoopMapperAttr.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ParallelLoopMapperAttrGen/mlir/Dialect/GPU/ParallelLoopMapperEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ParallelLoopMapperAttrGen/mlir/Dialect/GPU/ParallelLoopMapperEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ParserTokenKinds/TokenKinds.def" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/QuantOpsIncGen/mlir/Dialect/Quant/QuantOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/QuantOpsIncGen/mlir/Dialect/Quant/QuantOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/QuantOpsIncGen/mlir/Dialect/Quant/QuantOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/QuantPassIncGen/mlir/Dialect/Quant/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ROCDLOpsIncGen/mlir/Dialect/LLVMIR/ROCDLOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ROCDLOpsIncGen/mlir/Dialect/LLVMIR/ROCDLOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ROCDLOpsIncGen/mlir/Dialect/LLVMIR/ROCDLOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/RegionKindInterfaceIncGen/mlir/IR/RegionKindInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/RegionKindInterfaceIncGen/mlir/IR/RegionKindInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SCFIncGen/mlir/Dialect/SCF/SCFOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SCFIncGen/mlir/Dialect/SCF/SCFOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SCFIncGen/mlir/Dialect/SCF/SCFOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SCFPassIncGen/mlir/Dialect/SCF/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVAttrUtilsGen/mlir/Dialect/SPIRV/IR/SPIRVAttrUtils.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVAvailabilityIncGen/mlir/Dialect/SPIRV/IR/SPIRVAvailability.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVAvailabilityIncGen/mlir/Dialect/SPIRV/IR/SPIRVAvailability.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVAvailabilityIncGen/mlir/Dialect/SPIRV/IR/SPIRVOpAvailabilityImpl.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVCanonicalizationIncGen/SPIRVCanonicalization.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVOpsIncGen/mlir/Dialect/SPIRV/IR/SPIRVCapabilityImplication.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVOpsIncGen/mlir/Dialect/SPIRV/IR/SPIRVEnumAvailability.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVOpsIncGen/mlir/Dialect/SPIRV/IR/SPIRVEnumAvailability.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVOpsIncGen/mlir/Dialect/SPIRV/IR/SPIRVEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVOpsIncGen/mlir/Dialect/SPIRV/IR/SPIRVEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVOpsIncGen/mlir/Dialect/SPIRV/IR/SPIRVOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVOpsIncGen/mlir/Dialect/SPIRV/IR/SPIRVOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVOpsIncGen/mlir/Dialect/SPIRV/IR/SPIRVOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVPassIncGen/mlir/Dialect/SPIRV/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SPIRVSerializationGen/mlir/Dialect/SPIRV/IR/SPIRVSerialization.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ShapeOpsIncGen/mlir/Dialect/Shape/IR/ShapeOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ShapeOpsIncGen/mlir/Dialect/Shape/IR/ShapeOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ShapeOpsIncGen/mlir/Dialect/Shape/IR/ShapeOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ShapeToStandardGen/ShapeToStandard.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ShapeTransformsPassIncGen/mlir/Dialect/Shape/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SideEffectInterfacesIncGen/mlir/Interfaces/SideEffectInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SideEffectInterfacesIncGen/mlir/Interfaces/SideEffectInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/StandardOpsIncGen/mlir/Dialect/StandardOps/IR/Ops.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/StandardOpsIncGen/mlir/Dialect/StandardOps/IR/Ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/StandardOpsIncGen/mlir/Dialect/StandardOps/IR/OpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/StandardOpsIncGen/mlir/Dialect/StandardOps/IR/OpsEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/StandardOpsIncGen/mlir/Dialect/StandardOps/IR/OpsEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/StandardOpsTransformsPassIncGen/mlir/Dialect/StandardOps/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SymbolInterfacesIncGen/mlir/IR/SymbolInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/SymbolInterfacesIncGen/mlir/IR/SymbolInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TensorBaseIncGen/mlir/Dialect/Tensor/IR/TensorOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TensorOpsIncGen/mlir/Dialect/Tensor/IR/TensorOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TensorOpsIncGen/mlir/Dialect/Tensor/IR/TensorOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TensorPassIncGen/mlir/Dialect/Tensor/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TosaDialectIncGen/mlir/Dialect/Tosa/IR/TosaOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TosaDialectIncGen/mlir/Dialect/Tosa/IR/TosaOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TosaDialectIncGen/mlir/Dialect/Tosa/IR/TosaOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TosaDialectIncGen/mlir/Dialect/Tosa/IR/TosaStructs.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TosaDialectIncGen/mlir/Dialect/Tosa/IR/TosaStructs.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TosaInterfacesIncGen/mlir/Dialect/Tosa/IR/TosaInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TosaInterfacesIncGen/mlir/Dialect/Tosa/IR/TosaInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TosaPassIncGen/mlir/Dialect/Tosa/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TransformsPassIncGen/mlir/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TransformsPassIncGen/mlir/Transforms/Transforms.capi.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/TransformsPassIncGen/mlir/Transforms/Transforms.capi.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/VectorInterfacesIncGen/mlir/Interfaces/VectorInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/VectorInterfacesIncGen/mlir/Interfaces/VectorInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/VectorOpsIncGen/mlir/Dialect/Vector/VectorOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/VectorOpsIncGen/mlir/Dialect/Vector/VectorOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/VectorOpsIncGen/mlir/Dialect/Vector/VectorOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/VectorOpsIncGen/mlir/Dialect/Vector/VectorOpsEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/VectorOpsIncGen/mlir/Dialect/Vector/VectorOpsEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ViewLikeInterfaceIncGen/mlir/Interfaces/ViewLikeInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/_virtual_includes/ViewLikeInterfaceIncGen/mlir/Interfaces/ViewLikeInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/AffineAnalysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/AffineStructures.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/AliasAnalysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/AliasAnalysis/LocalAliasAnalysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/BufferAliasAnalysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/CallGraph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/LinearTransform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/Liveness.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/LoopAnalysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/NestedMatcher.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/NumberOfExecutions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/Presburger/Fraction.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/Presburger/Matrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/Presburger/Simplex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/PresburgerSet.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/SliceAnalysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Analysis/Utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/AffineToStandard/AffineToStandard.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/ArmSVEToLLVM/ArmSVEToLLVM.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/AsyncToLLVM/AsyncToLLVM.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/ComplexToLLVM/ComplexToLLVM.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/GPUCommon/GPUCommonPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/GPUToNVVM/GPUToNVVMPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/GPUToROCDL/GPUToROCDLPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/GPUToSPIRV/GPUToSPIRV.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/GPUToSPIRV/GPUToSPIRVPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/GPUToVulkan/ConvertGPUToVulkanPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/LinalgToLLVM/LinalgToLLVM.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/LinalgToSPIRV/LinalgToSPIRV.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/LinalgToSPIRV/LinalgToSPIRVPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/LinalgToStandard/LinalgToStandard.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/OpenMPToLLVM/ConvertOpenMPToLLVM.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/PDLToPDLInterp/PDLToPDLInterp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/Passes.capi.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/Passes.capi.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/SCFToGPU/SCFToGPU.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/SCFToGPU/SCFToGPUPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/SCFToOpenMP/SCFToOpenMP.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/SCFToSPIRV/SCFToSPIRV.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/SCFToSPIRV/SCFToSPIRVPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/SCFToStandard/SCFToStandard.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/SPIRVToLLVM/SPIRVToLLVM.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/SPIRVToLLVM/SPIRVToLLVMPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/ShapeToStandard/ShapeToStandard.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/StandardToLLVM/ConvertStandardToLLVM.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/StandardToLLVM/ConvertStandardToLLVMPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/StandardToSPIRV/StandardToSPIRV.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/StandardToSPIRV/StandardToSPIRVPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/TosaToLinalg/TosaToLinalg.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/TosaToSCF/TosaToSCF.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/TosaToStandard/TosaToStandard.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/VectorToLLVM/ConvertVectorToLLVM.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/VectorToROCDL/VectorToROCDL.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/VectorToSCF/VectorToSCF.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/VectorToSPIRV/VectorToSPIRV.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Conversion/VectorToSPIRV/VectorToSPIRVPass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/AMX/AMX.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/AMX/AMX.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/AMX/AMXDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/AMX/AMXDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/AMX/Transforms.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/AVX512/AVX512.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/AVX512/AVX512.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/AVX512/AVX512Dialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/AVX512/AVX512Dialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/AVX512/Transforms.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/EDSC/Builders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/EDSC/Intrinsics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/IR/AffineMemoryOpInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/IR/AffineMemoryOpInterfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/IR/AffineMemoryOpInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/IR/AffineOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/IR/AffineOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/IR/AffineOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/IR/AffineOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/IR/AffineValueMap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Affine/Utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/ArmNeon/ArmNeon.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/ArmNeon/ArmNeon.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/ArmNeon/ArmNeonDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/ArmNeon/ArmNeonDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/ArmSVE/ArmSVE.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/ArmSVE/ArmSVE.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/ArmSVE/ArmSVEDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/ArmSVE/ArmSVEDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/ArmSVE/ArmSVETypes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/ArmSVE/ArmSVETypes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Async/IR/Async.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Async/IR/AsyncOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Async/IR/AsyncOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Async/IR/AsyncOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Async/IR/AsyncOpsTypes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Async/IR/AsyncOpsTypes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Async/IR/AsyncTypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Async/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Async/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/CommonFolders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Complex/IR/Complex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Complex/IR/ComplexOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Complex/IR/ComplexOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Complex/IR/ComplexOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/DLTI/DLTI.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/DLTI/DLTIDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/DLTI/Traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/GPUDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/GPUOpInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/GPUOpInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/GPUOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/GPUOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/GPUOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/MemoryPromotion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/ParallelLoopMapper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/ParallelLoopMapperAttr.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/ParallelLoopMapperAttr.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/ParallelLoopMapperEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/ParallelLoopMapperEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/GPU/Utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/FunctionCallUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMArmSVE.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMArmSVE.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMArmSVEDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMArmSVEDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMConversionEnumsFromLLVM.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMConversionEnumsToLLVM.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMConversions.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMOpsAttrDefs.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMOpsAttrDefs.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMOpsEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMOpsEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMOpsInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMOpsInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/LLVMTypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/NVVMConversions.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/NVVMDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/NVVMOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/NVVMOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/NVVMOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/ROCDLDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/ROCDLOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/ROCDLOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/ROCDLOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/Transforms/LegalizeForExport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/Transforms/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/LLVMIR/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/Analysis/DependenceAnalysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/EDSC/Builders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/EDSC/FoldedIntrinsics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/EDSC/Intrinsics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgInterfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgNamedStructuredOps.tcgen.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgNamedStructuredOps.tcgen.td" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgNamedStructuredOps.yamlgen.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgNamedStructuredOps.yamlgen.td" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgSparseOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgSparseOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgStructuredOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgStructuredOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/IR/LinalgTypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/Transforms/CodegenStrategy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/Transforms/Hoisting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/Transforms/Transforms.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Linalg/Utils/Utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Math/EDSC/Intrinsics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Math/IR/Math.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Math/IR/MathOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Math/IR/MathOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Math/IR/MathOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Math/Transforms/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/MemRef/EDSC/Intrinsics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/MemRef/IR/MemRef.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/MemRef/IR/MemRefOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/MemRef/IR/MemRefOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/MemRef/IR/MemRefOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenACC/OpenACC.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenACC/OpenACCOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenACC/OpenACCOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenACC/OpenACCOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenACC/OpenACCOpsEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenACC/OpenACCOpsEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenMP/OpenMPDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenMP/OpenMPOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenMP/OpenMPOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenMP/OpenMPOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenMP/OpenMPOpsEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/OpenMP/OpenMPOpsEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDL/IR/PDL.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDL/IR/PDLOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDL/IR/PDLOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDL/IR/PDLOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDL/IR/PDLOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDL/IR/PDLOpsTypes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDL/IR/PDLOpsTypes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDL/IR/PDLTypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDLInterp/IR/PDLInterp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDLInterp/IR/PDLInterpOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDLInterp/IR/PDLInterpOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/PDLInterp/IR/PDLInterpOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Quant/FakeQuantSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Quant/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Quant/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Quant/QuantOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Quant/QuantOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Quant/QuantOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Quant/QuantOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Quant/QuantTypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Quant/QuantizeUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Quant/UniformSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SCF/EDSC/Builders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SCF/EDSC/Intrinsics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SCF/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SCF/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SCF/SCF.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SCF/SCFOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SCF/SCFOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SCF/SCFOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SCF/Transforms.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SCF/Utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SDBM/SDBM.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SDBM/SDBMDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SDBM/SDBMExpr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/ParserUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVAttrUtils.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVAttributes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVAvailability.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVAvailability.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVCapabilityImplication.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVEnumAvailability.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVEnumAvailability.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVEnums.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVGLSLCanonicalization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVModule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVOpAvailabilityImpl.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVOpTraits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVSerialization.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/SPIRVTypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/TargetAndABI.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/TargetAndABI.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/IR/TargetAndABI.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/Transforms/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/Transforms/SPIRVConversion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/SPIRV/Utils/LayoutUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Shape/IR/Shape.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Shape/IR/ShapeCanonicalization.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Shape/IR/ShapeOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Shape/IR/ShapeOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Shape/IR/ShapeOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Shape/Transforms/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Shape/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/EDSC/Builders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/EDSC/Intrinsics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/IR/Ops.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/IR/Ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/IR/Ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/IR/OpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/IR/OpsEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/IR/OpsEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/Transforms/DecomposeCallGraphTypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/Transforms/FuncConversions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/Transforms/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/StandardOps/Utils/Utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tensor/IR/Tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tensor/IR/TensorOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tensor/IR/TensorOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tensor/IR/TensorOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tensor/Transforms/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tensor/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/IR/TosaInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/IR/TosaInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/IR/TosaOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/IR/TosaOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/IR/TosaOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/IR/TosaOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/IR/TosaStructs.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/IR/TosaStructs.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/Transforms/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Tosa/Utils/QuantUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Utils/StructuredOpsUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Vector/EDSC/Builders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Vector/EDSC/Intrinsics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Vector/VectorOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Vector/VectorOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Vector/VectorOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Vector/VectorOpsDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Vector/VectorOpsEnums.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Vector/VectorOpsEnums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Vector/VectorTransforms.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Dialect/Vector/VectorUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/EDSC/Builders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/AffineExpr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/AffineExprVisitor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/AffineMap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/AsmState.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/AttributeSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Attributes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Block.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BlockAndValueMapping.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BlockSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Builders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinAttributes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinAttributes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinAttributes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinDialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinDialect.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinLocationAttributes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinLocationAttributes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinOps.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinOps.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinOps.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinTypes.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinTypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/BuiltinTypes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Diagnostics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Dialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/DialectImplementation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/DialectInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Dominance.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/FunctionImplementation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/FunctionSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Identifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/ImplicitLocOpBuilder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/IntegerSet.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Location.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/MLIRContext.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Matchers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/OpAsmInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/OpAsmInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/OpDefinition.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/OpImplementation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Operation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/OperationSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/OwningOpRef.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/PatternMatch.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Region.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/RegionGraphTraits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/RegionKindInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/RegionKindInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/RegionKindInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/StorageUniquerSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/SymbolInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/SymbolInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/SymbolTable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/TypeRange.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/TypeSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/TypeUtilities.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/UseDefLists.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Value.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Verifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/IR/Visitors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/InitAllDialects.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/InitAllPasses.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/CallInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/CallInterfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/CallInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/CastInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/CastInterfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/CastInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/ControlFlowInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/ControlFlowInterfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/ControlFlowInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/CopyOpInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/CopyOpInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/CopyOpInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DataLayoutAttrInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DataLayoutAttrInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DataLayoutInterfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DataLayoutOpInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DataLayoutOpInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DataLayoutTypeInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DataLayoutTypeInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DecodeAttributesInterfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DerivedAttributeOpInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DerivedAttributeOpInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/DerivedAttributeOpInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/FoldInterfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/InferTypeOpInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/InferTypeOpInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/InferTypeOpInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/LoopLikeInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/LoopLikeInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/LoopLikeInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/SideEffectInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/SideEffectInterfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/SideEffectInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/VectorInterfaces.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/VectorInterfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/VectorInterfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/ViewLikeInterface.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/ViewLikeInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Interfaces/ViewLikeInterface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Parser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Pass/AnalysisManager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Pass/Pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Pass/PassInstrumentation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Pass/PassManager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Pass/PassOptions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Pass/PassRegistry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Rewrite/FrozenRewritePatternSet.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Rewrite/PatternApplicator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/DebugAction.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/DebugCounter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/DebugStringHelper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/FileUtilities.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/IndentedOstream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/InterfaceSupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/LLVM.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/LogicalResult.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/MathExtras.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/StorageUniquer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/ThreadLocalCache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/ToolUtilities.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Support/TypeID.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Target/LLVMIR/Dialect/LLVMIR/LLVMToLLVMIRTranslation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Target/LLVMIR/Dialect/NVVM/NVVMToLLVMIRTranslation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Target/LLVMIR/Export.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Target/LLVMIR/LLVMTranslationInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Target/LLVMIR/ModuleTranslation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Target/LLVMIR/TypeTranslation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Target/SPIRV/SPIRVBinaryUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Target/SPIRV/Serialization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/BufferUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/Bufferize.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/DialectConversion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/FoldUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/GreedyPatternRewriteDriver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/InliningUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/LocationSnapshot.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/LoopFusionUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/LoopUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/Passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/Passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/RegionUtils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/Transforms.capi.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/Transforms.capi.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/Utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/ViewOpGraph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/include/mlir/Transforms/ViewRegionGraph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Conversion/GPUCommon/GPUOpsLowering.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Conversion/GPUCommon/IndexIntrinsicsOpLowering.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Conversion/GPUCommon/OpToFuncCallLowering.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Conversion/GPUToNVVM/GPUToNVVM.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Conversion/GPUToROCDL/GPUToROCDL.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Conversion/GPUToSPIRV/GPUToSPIRV.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Conversion/PDLToPDLInterp/Predicate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Conversion/PDLToPDLInterp/PredicateTree.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Conversion/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Conversion/ShapeToStandard/ShapeToStandard.cpp.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/Affine/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/Async/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/GPU/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/LLVMIR/IR/TypeDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/LLVMIR/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/Linalg/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/Quant/IR/TypeDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/Quant/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/SCF/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/SDBM/SDBMExprDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/SPIRV/IR/SPIRVCanonicalization.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/SPIRV/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/Shape/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/StandardOps/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Dialect/Tensor/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/IR/AffineExprDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/IR/AffineMapDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/IR/AttributeDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/IR/IntegerSetDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/IR/TypeDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Parser/Lexer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Parser/Parser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Parser/ParserState.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Parser/Token.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Parser/TokenKinds.def" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Pass/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Rewrite/ByteCode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Target/LLVMIR/DebugTranslation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Target/SPIRV/Serialization/Serializer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/llvm-project/mlir/lib/Transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/lmdb/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/lmdb/lmdb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/lmdb/midl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_cuda/cuda/cuda/cuda_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/__multiarray_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/__ufunc_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/_neighborhood_iterator_imp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/_numpyconfig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/arrayobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/arrayscalars.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/halffloat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/multiarray_api.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/ndarrayobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/ndarraytypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/noprefix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/npy_1_7_deprecated_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/npy_3kcompat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/npy_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/npy_cpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/npy_endian.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/npy_interrupt.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/npy_math.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/npy_no_deprecated_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/npy_os.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/numpyconfig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/old_defines.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/oldnumeric.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/random/bitgen.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/random/distributions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/ufunc_api.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/ufuncobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/numpy_include/numpy/utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/Python-ast.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/Python.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/abstract.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/accu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/asdl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/ast.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/bitset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/bltinmodule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/boolobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/bytearrayobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/bytes_methods.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/bytesobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/cellobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/ceval.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/classobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/code.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/codecs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/compile.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/complexobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/datetime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/descrobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/dictobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/dtoa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/dynamic_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/enumobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/errcode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/eval.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/fileobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/fileutils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/floatobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/frameobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/funcobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/genobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/graminit.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/grammar.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/import.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/ceval.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/condvar.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/gil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/hamt.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/import.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/mem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/pygetopt.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/pystate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/internal/warnings.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/intrcheck.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/iterobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/listobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/longintrepr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/longobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/marshal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/memoryobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/metagrammar.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/methodobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/modsupport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/moduleobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/namespaceobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/node.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/object.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/objimpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/odictobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/opcode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/osdefs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/osmodule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/parsetok.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/patchlevel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pgen.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pgenheaders.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/py_curses.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pyarena.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pyatomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pycapsule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pyconfig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pyctype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pydebug.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pydtrace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pyerrors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pyexpat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pyfpe.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pyhash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pylifecycle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pymacconfig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pymacro.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pymath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pymem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pyport.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pystate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pystrcmp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pystrhex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pystrtod.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pythonrun.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pythread.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/pytime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/rangeobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/setobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/sliceobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/structmember.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/structseq.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/symtable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/sysmodule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/token.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/traceback.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/tupleobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/typeslots.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/ucnhash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/unicodeobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/warnings.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/weakrefobject.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_python/python_include/zope.proxy/proxy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/local_config_rocm/rocm/rocm/rocm_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_debug.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_ocl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_ocl.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_sycl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_sycl.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_sycl_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_threadpool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_threadpool.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_threadpool_iface.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/dnnl_version.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/mkldnn.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/mkldnn.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/mkldnn_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/mkldnn_debug.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/mkldnn_dnnl_mangling.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/mkldnn_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/mkldnn_version.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_config.h.in" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_debug.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_ocl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_ocl.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_sycl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_sycl.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_sycl_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_threadpool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_threadpool.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_threadpool_iface.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_version.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/include/oneapi/dnnl/dnnl_version.h.in" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/batch_normalization_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/bfloat16.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/binary_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/bit_cast.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/broadcast_strategy.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/c_types_map.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/compiler_workarounds.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/concat_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/convolution_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/counting_barrier.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/deconvolution_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/dnnl_thread.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/dnnl_thread_parallel_nd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/dnnl_traits.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/eltwise_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/engine.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/float16.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/gemm_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/gemm_types.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/gemm_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/guard_manager.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/inner_product_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/internal_defs.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/internal_desc_types.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/ittnotify.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/ittnotify/disable_warnings.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/ittnotify/ittnotify.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/ittnotify/ittnotify_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/ittnotify/ittnotify_static.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/ittnotify/ittnotify_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/ittnotify/jitprofiling.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/ittnotify/legacy/ittnotify.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/layer_normalization_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/lrn_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/math_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/matmul_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/memory.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/memory_debug.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/memory_desc_wrapper.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/memory_storage.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/memory_tracking.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/nstl.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/pooling_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/prelu_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/primitive.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/primitive_attr.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/primitive_cache.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/primitive_desc.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/primitive_exec_types.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/primitive_hashing.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/primitive_iterator.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/reduction_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/reorder_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/resampling_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/rnn.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/rnn_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/rw_mutex.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/scratchpad.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/scratchpad_debug.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/shuffle_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/softmax_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/stream.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/sum_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/tag_traits.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/thread_local_storage.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/type_helpers.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/verbose.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/common/z_magic.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/acl_convolution_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/acl_gemm_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/acl_indirect_gemm_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/acl_winograd_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/cpu_barrier.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/cpu_isa_traits.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/cpu_reducer.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/injectors/injector_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/injectors/jit_uni_eltwise_injector.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_generator.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_op_imm_check.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_primitive_conf.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_sve_512_1x1_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_sve_512_1x1_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_sve_512_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_sve_512_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_sve_512_x8s8s32x_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_sve_512_x8s8s32x_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_uni_1x1_conv_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_uni_batch_normalization.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_uni_batch_normalization_s8.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_uni_eltwise.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_uni_eltwise_int.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_uni_i8i8_pooling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_uni_pool_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_uni_pooling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_uni_reorder.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/aarch64/jit_uni_softmax.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/binary_injector_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_batch_normalization_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_batch_normalization_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_binary_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_concat_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_convolution_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_deconvolution_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_eltwise_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_engine.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_inner_product_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_layer_normalization_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_lrn_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_memory_storage.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_pooling_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_prelu_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_primitive.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_reduction_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_resampling_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_shuffle_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_softmax_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_stream.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/cpu_sum_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/dw_convolution_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm/f32/gemm_utils_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm/f32/ref_gemm_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm/gemm.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm/gemm_msan_unpoison.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm/gemm_pack.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm/os_blas.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm/s8x8s32/ref_gemm_s8x8s32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm/s8x8s32/simple_gemm_s8s8s32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm_convolution_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm_inner_product.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm_inner_product_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm_x8s8s32x_conv_zp_src_pad_comp.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm_x8s8s32x_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm_x8s8s32x_convolution_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/gemm_x8s8s32x_inner_product.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/jit_utils/jit_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/jit_utils/linux_perf/linux_perf.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/matmul/cpu_matmul_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/matmul/gemm_based_common.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/matmul/gemm_bf16_matmul.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/matmul/gemm_f32_matmul.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/matmul/gemm_x8s8s32x_matmul.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/matmul/matmul_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/matmul/ref_matmul.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/nchw_pooling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ncsp_batch_normalization.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/nhwc_pooling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/nspc_batch_normalization.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/platform.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/primitive_attr_postops.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_batch_normalization.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_binary.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_concat.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_deconvolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_eltwise.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_fused_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_inner_product.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_layer_normalization.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_lrn.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_pooling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_prelu.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_reduction.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_resampling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_shuffle.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_softmax.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/ref_sum.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/reorder/cpu_reorder.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/reorder/cpu_reorder_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/reorder/simple_reorder.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/resampling_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/rnn/cpu_rnn_pd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/rnn/postgemm_dispatcher.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/rnn/ref_rnn.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/rnn/rnn_reorders.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/rnn/rnn_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/simple_concat.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/simple_layer_normalization.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/simple_layer_normalization_kernels.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/simple_q10n.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/simple_resampling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/simple_sum.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/amx_tile_configure.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/brgemm/brgemm.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/brgemm/brgemm_amx.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/brgemm/brgemm_types.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/cpu_barrier.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/cpu_isa_traits.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/cpu_reducer.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/amx/jit_avx512_core_amx_copy_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/amx/jit_avx512_core_amx_gemm_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/bf16/common_s16.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/bf16/jit_avx512_core_gemm_bf16bf16f32_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/bf16/jit_avx512_core_gemv_bf16bf16f32_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/f32/common_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/f32/jit_avx2_kernel_sgemm_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/f32/jit_avx512_common_gemm_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/f32/jit_avx512_core_gemm_smalln_tn_f32_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/f32/jit_avx_gemm_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/f32/jit_avx_gemv_t_f32_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/f32/jit_sse41_gemv_n_f32_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/f32/jit_sse41_gemv_t_f32_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/gemm_driver.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/gemm_info.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/gemm_pack.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/gemm_pack_storage.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/gemm_partition.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/gemm_threading.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/gemm_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/gemv_driver.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/s8x8s32/common_u8.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/s8x8s32/jit_avx2_gemm_s8u8s32_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/s8x8s32/jit_avx512_core_gemm_s8u8s32_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/s8x8s32/jit_avx512_core_gemv_s8x8s32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm/s8x8s32/jit_avx512_core_kernel_gemv_s8x8s32_kern.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm_bf16_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/gemm_bf16_inner_product.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/injectors/injector_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/injectors/jit_uni_binary_injector.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/injectors/jit_uni_eltwise_injector.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/injectors/jit_uni_postops_injector.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/ip_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx2_1x1_conv_kernel_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx2_1x1_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx2_conv_kernel_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx2_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_common_1x1_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_common_1x1_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_common_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_common_conv_winograd_kernel_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_common_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_common_convolution_winograd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_common_resampling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_amx_1x1_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_amx_1x1_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_amx_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_amx_conv_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_amx_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_amx_int8_deconvolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_bf16_1x1_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_bf16_1x1_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_bf16_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_bf16_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_bf16_dw_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_bf16_sum.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_bf16cvt.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_f32_wino_conv_2x3.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_f32_wino_conv_4x3.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_f32_wino_conv_4x3_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_u8s8s32x_wino_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_x8s8s32x_1x1_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_x8s8s32x_1x1_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_x8s8s32x_1x1_deconvolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_x8s8s32x_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_x8s8s32x_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_avx512_core_x8s8s32x_deconvolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_brgemm_1x1_conv.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_brgemm_conv.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_brgemm_conv_trans_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_brgemm_conv_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_brgemm_inner_product.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_brgemm_inner_product_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_brgemm_post_ops.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_brgemm_primitive_conf.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_brgemm_transpose_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_gemm_inner_product_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_gemm_x8s8s32x_conv_zp_src_pad_comp.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_gemm_x8s8s32x_convolution_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_generator.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_primitive_conf.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_sse41_1x1_conv_kernel_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_sse41_1x1_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_sse41_conv_kernel_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_sse41_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_transpose_src_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_1x1_conv_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_batch_normalization.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_batch_normalization_s8.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_binary.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_dw_conv_kernel_f32.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_dw_conv_kernel_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_dw_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_eltwise.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_eltwise_int.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_i8i8_binary.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_i8i8_pooling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_layer_normalization_kernels.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_pool_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_pooling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_reorder.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_resampling.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_resampling_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_softmax.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_tbb_batch_normalization.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_x8s8s32x_1x1_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_x8s8s32x_1x1_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_x8s8s32x_1x1_deconvolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_x8s8s32x_conv_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_x8s8s32x_convolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/jit_uni_x8s8s32x_deconvolution.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/jit_avx512_common_lrn.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/jit_avx512_common_lrn_bwd_base.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/jit_avx512_common_lrn_bwd_blocked.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/jit_avx512_common_lrn_bwd_nhwc.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/jit_avx512_common_lrn_fwd_base.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/jit_avx512_common_lrn_fwd_blocked.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/jit_avx512_common_lrn_fwd_nhwc.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/jit_avx512_common_lrn_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/jit_uni_lrn.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/jit_uni_lrn_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/lrn_avx512_blocked_executor.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/lrn_avx512_nhwc_executor.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/lrn_executor.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/lrn/lrn_executor_factory.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/matmul/brgemm_matmul.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/matmul/brgemm_matmul_copy_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/matmul/brgemm_matmul_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/prelu/jit_prelu_backward.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/prelu/jit_prelu_base_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/prelu/jit_prelu_forward.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/prelu/jit_prelu_reduction_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/prelu/jit_prelu_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/prelu/jit_uni_prelu_backward_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/prelu/jit_uni_prelu_forward_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_gru_cell_postgemm_1_bwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_gru_cell_postgemm_1_fwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_gru_cell_postgemm_2_bwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_gru_cell_postgemm_2_fwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_gru_lbr_cell_postgemm_bwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_gru_lbr_cell_postgemm_fwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_lstm_cell_postgemm_bwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_lstm_cell_postgemm_fwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_lstm_cell_projection_postgemm_fwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_rnn_cell_postgemm_bwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_rnn_cell_postgemm_fwd.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/rnn/jit_uni_rnn_common_postgemm.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/shuffle/jit_uni_shuffle.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/shuffle/jit_uni_shuffle_kernel.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/wino_reorder.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/xbyak/xbyak.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/xbyak/xbyak_bin2hex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/xbyak/xbyak_mnemonic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/x64/xbyak/xbyak_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/mkl_dnn_v1/src/cpu/zero_point_utils.hpp" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nasm/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/internal/common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/internal/dll.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/internal/headers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/internal/sem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/internal/wait_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/aarch64/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/alpha/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/arm/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/atomic_ind/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/c++11.futex/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/c++11/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/c++11/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/c11/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/clang/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/clang/compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/cygwin/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/decc/compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/freebsd/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/gcc/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/gcc/compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/gcc_new/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/gcc_new_debug/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/gcc_no_tls/compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/gcc_old/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/lcc/compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/lcc/nsync_time_init.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/linux/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/macos/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/macos/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/macos/platform_c++11_os.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/msvc/compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/netbsd/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/netbsd/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/openbsd/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/osf1/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/pmax/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/posix/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/posix/nsync_time_init.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/posix/platform_c++11_os.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/ppc32/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/ppc64/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/s390x/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/shark/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/tcc/compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/win32/atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/win32/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/win32/platform_c++11_os.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/x86_32/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/platform/x86_64/cputype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_atomic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_counter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_cpp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_cv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_debug.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_mu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_mu_wait.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_note.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_once.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_time_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/nsync/public/nsync_waiter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/opt_einsum_archive/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/org_python_pypi_backports_weakref/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/org_sqlite/sqlite3.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/org_sqlite/sqlite3ext.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pasta/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/png/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/png/png.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/png/pngconf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/png/pngdebug.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/png/pnginfo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/png/pnglibconf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/png/pngpriv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/png/pngstruct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/attr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/buffer_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/cast.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/chrono.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/complex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/detail/class.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/detail/common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/detail/descr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/detail/init.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/detail/internals.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/detail/typeid.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/embed.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/eval.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/functional.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/iostream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/numpy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/operators.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/pybind11.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/pytypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/stl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/pybind11/_virtual_includes/pybind11/pybind11/stl_bind.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/ruy/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/six_archive/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/snappy/COPYING" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/snappy/config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/snappy/snappy-internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/snappy/snappy-sinksource.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/snappy/snappy-stubs-internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/snappy/snappy-stubs-public.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/snappy/snappy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/sobol_data/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/sobol_data/sobol_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/tblib_archive/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/termcolor_archive/COPYING.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/typing_extensions_archive/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/upb/upb/decode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/upb/upb/encode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/upb/upb/generated_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/upb/upb/msg.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/upb/upb/port_def.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/upb/upb/port_undef.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/upb/upb/table.int.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/upb/upb/upb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/crc32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/deflate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/gzguts.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/inffast.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/inffixed.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/inflate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/inftrees.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/trees.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/zconf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/zlib.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/external/zlib/zutil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/any.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/any.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/api.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/arena.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/arena_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/arena_test_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/arenastring.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/annotation_test_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/code_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/command_line_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_enum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_enum_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_extension.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_file.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_map_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_message.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_message_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_message_layout_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_padding_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_primitive_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_string_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_unittest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/cpp/cpp_unittest.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_doc_comment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_enum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_enum_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_field_base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_map_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_message.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_message_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_names.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_primitive_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_reflection_class.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_repeated_enum_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_repeated_message_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_source_generator_base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/csharp/csharp_wrapper_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/importer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_doc_comment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_enum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_enum_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_enum_field_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_enum_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_extension.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_extension_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_file.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_generator_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_map_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_map_field_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_message.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_message_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_message_builder_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_message_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_message_field_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_message_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_name_resolver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_names.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_primitive_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_primitive_field_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_shared_code_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_string_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/java/java_string_field_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/js/js_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/js/well_known_types_embed.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/mock_code_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_enum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_enum_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_extension.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_file.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_map_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_message.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_message_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_nsobject_methods.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_oneof.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/objectivec/objectivec_primitive_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/package_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/parser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/php/php_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/plugin.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/plugin.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/python/python_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/ruby/ruby_generator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/scc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/subprocess.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/compiler/zip_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/descriptor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/descriptor.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/descriptor_database.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/duration.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/dynamic_message.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/empty.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/extension_set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/extension_set_inl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/field_mask.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/generated_enum_reflection.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/generated_enum_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/generated_message_reflection.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/generated_message_table_driven.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/generated_message_table_driven_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/generated_message_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/has_bits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/implicit_weak_message.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/inlined_string_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/coded_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/coded_stream_inl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/gzip_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/io_win32.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/package_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/printer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/strtod.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/tokenizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/zero_copy_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/zero_copy_stream_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/io/zero_copy_stream_impl_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/map_entry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/map_entry_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/map_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/map_field_inl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/map_field_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/map_lite_test_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/map_test_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/map_test_util_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/map_type_handler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/message.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/message_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/message_unittest.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/metadata_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/package_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/parse_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/port.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/port_def.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/port_undef.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/proto3_lite_unittest.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/reflection.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/reflection_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/reflection_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/repeated_field.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/source_context.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/struct.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/bytestream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/callback.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/casts.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/fastmem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/int128.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/logging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/map_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/mathlimits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/mathutil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/mutex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/once.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/platform_macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/port.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/status_macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/statusor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/stl_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/stringpiece.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/stringprintf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/strutil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/substitute.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/template_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/stubs/time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/test_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/test_util.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/test_util2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/test_util_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/testing/file.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/testing/googletest.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/text_format.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/timestamp.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/type.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/unknown_field_set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/delimited_message_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/field_comparator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/field_mask_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/constants.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/datapiece.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/default_value_objectwriter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/error_listener.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/expecting_objectwriter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/field_mask_utility.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/json_escaping.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/json_objectwriter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/json_stream_parser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/location_tracker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/mock_error_listener.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/object_location_tracker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/object_source.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/object_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/proto_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/protostream_objectsource.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/protostream_objectwriter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/structured_objectwriter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/type_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/type_info_test_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/internal/utility.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/json_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/message_differencer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/package_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/time_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/type_resolver.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/util/type_resolver_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/wire_format.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/wire_format_lite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/google/protobuf/wrappers.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/include/json/allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/include/json/assertions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/include/json/config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/include/json/forwards.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/include/json/json.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/include/json/json_features.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/include/json/reader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/include/json/value.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/include/json/version.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/include/json/writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/c_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/c_api_experimental.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/c_api_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/c_api_macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/conversion_macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/abstract_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/abstract_function.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/abstract_op_attrs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/abstract_operation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/abstract_tensor_handle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/c_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/c_api_experimental.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/c_api_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/c_api_unified_experimental.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/c_api_unified_experimental_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/dlpack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/immediate_execution_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/immediate_execution_distributed_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/immediate_execution_operation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/immediate_execution_tensor_handle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/tape.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/tfe_cancellation_manager_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/tfe_context_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/tfe_executor_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/tfe_monitoring_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/tfe_op_attrs_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/tfe_op_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/tfe_tensor_debug_info_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/eager/tfe_tensorhandle_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/env.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/experimental/filesystem/filesystem_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/experimental/filesystem/modular_filesystem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/experimental/filesystem/modular_filesystem_registration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/experimental/grappler/grappler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/experimental/stream_executor/stream_executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/experimental/stream_executor/stream_executor_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/kernels.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/kernels/tensor_shape_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/logging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/tensor_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/tf_attrtype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/tf_datatype.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/tf_file_statistics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/tf_status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/tf_status_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/tf_status_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/tf_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/tf_tensor_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/c/tf_tstring.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/framework/grad_op_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/framework/gradients.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/framework/ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/framework/scope.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/framework/scope_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/framework/while_gradients.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/array_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/array_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/audio_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/audio_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/candidate_sampling_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/candidate_sampling_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/const_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/control_flow_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/control_flow_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/data_flow_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/data_flow_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/functional_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/image_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/image_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/io_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/io_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/linalg_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/linalg_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/list_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/list_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/logging_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/logging_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/lookup_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/lookup_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/manip_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/manip_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/map_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/map_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/math_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/math_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/nn_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/nn_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/no_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/no_op_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/parsing_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/parsing_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/random_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/random_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/sparse_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/sparse_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/standard_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/state_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/state_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/string_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/string_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/training_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/training_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/user_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/user_ops_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/ops/while_loop.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/saved_model/bundle_v2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/saved_model/constants.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/saved_model/loader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/saved_model/loader_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/cc/saved_model/reader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/build_xla_ops_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/clone_constants_for_better_clustering.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/cluster_scoping_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/compilability_check_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/deadness_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/deadness_analysis_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/defs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/device_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/encapsulate_subgraphs_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/encapsulate_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/encapsulate_xla_computations_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/extract_outside_compilation_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/flags.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/force_xla_constants_on_host_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/increase_dynamism_for_auto_jit_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/introduce_floating_point_jitter_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/kernels/xla_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/mark_for_compilation_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/mark_for_compilation_pass_test_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/partially_decluster_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/report_clustering_info_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/resource_operation_safety_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/shape_inference.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/shape_inference_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_activity.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_activity_listener.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_cluster_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_compilation_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_compile_on_demand_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_device_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_device_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_kernel_creator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_launch_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_platform_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/jit/xla_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/_virtual_includes/MhloPassIncGen/mlir-hlo/Dialect/mhlo/transforms/mhlo_passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/_virtual_includes/chlo_ops_inc_gen/mlir-hlo/Dialect/mhlo/IR/chlo_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/_virtual_includes/hlo_ops_base_inc_gen/mlir-hlo/Dialect/mhlo/IR/hlo_ops_base.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/_virtual_includes/hlo_ops_inc_gen/mlir-hlo/Dialect/mhlo/IR/hlo_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/_virtual_includes/lhlo_gpu_ops_enums_inc_gen/mlir-hlo/Dialect/mhlo/IR/lhlo_gpu_ops_enums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/_virtual_includes/lhlo_gpu_ops_inc_gen/mlir-hlo/Dialect/mhlo/IR/lhlo_gpu_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/_virtual_includes/lhlo_gpu_ops_structs_inc_gen/mlir-hlo/Dialect/mhlo/IR/lhlo_gpu_ops_structs.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/_virtual_includes/lhlo_ops_inc_gen/mlir-hlo/Dialect/mhlo/IR/lhlo_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/_virtual_includes/lhlo_ops_structs_inc_gen/mlir-hlo/Dialect/mhlo/IR/lhlo_ops_structs.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/chlo_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/chlo_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/hlo_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/hlo_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/hlo_ops_base.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/hlo_ops_base_enums.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/hlo_ops_base_enums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/hlo_ops_base_structs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/hlo_ops_base_structs.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/hlo_ops_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/infer_fusibility_op_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/infer_fusibility_op_interface.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/lhlo_gpu_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/lhlo_gpu_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/lhlo_gpu_ops_enums.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/lhlo_gpu_ops_enums.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/lhlo_gpu_ops_structs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/lhlo_gpu_ops_structs.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/lhlo_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/lhlo_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/lhlo_ops_structs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/lhlo_ops_structs.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/IR/register.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/transforms/PassDetail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/transforms/map_chlo_to_hlo_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/transforms/mhlo_passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/transforms/passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/transforms/rewriters.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/utils/broadcast_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/utils/convert_op_folder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/hlo/include/mlir-hlo/utils/hlo_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/lite/utils/validators.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/mlir_bridge_rollout_policy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/mlir_graph_optimization_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/op_or_arg_name_mapper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/analysis/per_function_aggregate_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/analysis/resource_alias_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/analysis/side_effect_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/dialect_registration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_all_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_attributes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_device.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_dialect.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_executor.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_op_interfaces.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_op_interfaces.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_ops_a_m.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_ops_a_m.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_ops_n_z.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_ops_n_z.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_remaining_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_remaining_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_saved_model.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_saved_model.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_side_effects.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_structs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_structs.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_types.def" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tf_verifiers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tfrt_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/ir/tfrt_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/bridge.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/collection_ops_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/decompose_resource_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/einsum.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/lift_variables.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/lower_tf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/passes_detail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/resource_op_lifting_cleanup.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/savedmodel_passes_detail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/shape_inference.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/tf_data_optimization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/tf_passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/tf_saved_model_passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/tf_savedmodel_passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/transforms/unroll_batch_matmul.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/translate/export_graphdef.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/translate/export_tf_dialect_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/translate/import_model.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/translate/mlir_import_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/translate/mlir_roundtrip_flags.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/translate/upgrade_graph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/attribute_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/bridge_logger.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/compile_mlir_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/convert_attr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/convert_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/convert_type.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/device_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/error_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/export_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/mangling_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/parse_text_proto.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/serialize_mlir_module_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/shape_inference_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/tpu_rewrite_device_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/translate_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/verification_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/verify_suitable_for_graph_export.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/visitor_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tensorflow/utils/xla_sharding_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tfr/integration/graph_decompose_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tfr/integration/node_expansion_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tfr/integration/tfr_decompose_ctx.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tfr/ir/tfr_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tfr/ir/tfr_ops.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tfr/ir/tfr_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tfr/passes/passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/tfr/utils/utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/utils/array_container_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/utils/name_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/utils/string_container_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/attribute_exporter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/attribute_importer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/hlo_function_importer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/hlo_module_importer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/hlo_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/ir/mlir_hlo_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/mlir_hlo_to_hlo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/transforms/passes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/transforms/xla_legalize_tf_passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/transforms/xla_legalize_tf_passes_detail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/transforms/xla_passes.h.inc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/transforms/xla_passes_detail.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/mlir/xla/type_to_shape.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/cc/ops/xla_jit_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/cc/ops/xla_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/const_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/frontend_attributes_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/functionalize_cond.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/functionalize_control_flow.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/functionalize_control_flow_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/functionalize_while.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/graph_compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/host_compute_metadata.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/case_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/conv_op_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/cwise_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/elu_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/gather_op_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/if_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/if_while_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/image_resize_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/index_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/random_ops_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/reduction_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/relu_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/shape_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/tensor_list_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/kernels/while_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/lib/broadcast.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/lib/data_format.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/lib/random.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/lib/scatter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/lib/util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/literal_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/mlir_bridge_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/rearrange_function_argument.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/resource_operation_table.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/shape_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/sharding_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/side_effect_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/tf2xla.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/tf2xla_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/type_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/xla_argument.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/xla_compilation_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/xla_compiled_cpu_function.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/xla_compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/xla_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/xla_expression.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/xla_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/xla_op_kernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/xla_op_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/tf2xla/xla_resource.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/array.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/array2d.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/array3d.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/array4d.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/client_library.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/compile_only_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/executable_build_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/global_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/arithmetic.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/comparators.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/constants.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/conv_grad_size_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/loops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/math.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/matrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/pooling.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/prng.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/qr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/quantize.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/self_adjoint_eig.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/slicing.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/sorting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/svd.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/lib/tridiagonal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/local_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/padding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/sharding_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/value_inference.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/xla_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/client/xla_computation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/comparison_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/cpu_function_runtime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/debug_options_flags.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/debug_options_parsers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/executable_run_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/execution_options_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/index_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/iterator_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/layout.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/layout_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/literal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/literal_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/map_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/metric_table_report.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/overflow_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/parse_flags_from_env.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/permutation_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/primitive_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/protobuf_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/refcounting_hash_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/algebraic_simplifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/all_gather_decomposer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/all_to_all_decomposer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/allocation_tracker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/backend.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/batch_dot_simplification.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/batchnorm_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/buffer_assignment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/buffer_value.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/buffer_value_containers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/call_graph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/call_inliner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/channel_tracker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cholesky_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/collective_ops_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/comparison_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/compilation_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/compilation_stats.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/compile_only_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/computation_layout.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/computation_placer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/conditional_canonicalizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/conditional_simplifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/conditional_to_select.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/convolution_group_converter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/copy_insertion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/buffer_info_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/compiler_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/conv_canonicalization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/cpu_compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/cpu_executable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/cpu_instruction_fusion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/cpu_layout_assignment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/cpu_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/cpu_runtime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/cpu_transfer_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/dot_op_emitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/elemental_ir_emitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/ir_emission_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/ir_emitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/ir_function.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/llvm_ir_runtime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/mlir_emitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/orc_jit_memory_mapper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/parallel_loop_emitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/parallel_task_assignment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_conv2d.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_conv2d_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_conv2d_mkl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_fft.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_fft_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_fork_join.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_fp16.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_key_value_sort.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_lightweight_check.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_matmul.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_matmul_mkl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_pow.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_single_threaded_conv2d.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_single_threaded_fft.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_single_threaded_matmul.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/runtime_topk.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/shape_partition.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/simple_orc_jit.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/target_machine_features.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/tiled_dot_emitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/vector_support_library.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/windows_compatibility.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/cpu/xfeed_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/custom_call_target_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/dfs_hlo_visitor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/dfs_hlo_visitor_with_default.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/dot_decomposer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/dump.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/dynamic_dimension_inference.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/dynamic_index_splitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/dynamic_padder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/dynamic_parameter_binding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/dynamic_window_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/eigh_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/elemental_ir_emitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/executable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/execution_tracker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/flatten_call_graph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/fusion_node_indexing_evaluation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/fusion_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/gather_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/generic_transfer_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/global_device_id.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/gpu/gpu_device_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/gpu/gpu_executable_run_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/gpu/launch_dimensions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/gpu/parallel_loop_emitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/gpu/target_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/graphcycles/graphcycles.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/graphcycles/ordered_set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/heap_simulator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_alias_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_casting_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_clone_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_computation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_constant_folding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_cost_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_creation_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_cse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_dataflow_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_dce.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_domain_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_domain_metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_element_type_converter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_evaluator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_evaluator_typed_visitor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_execution_profile.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_execution_profile_data.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_graph_dumper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_input_output_alias_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_instruction.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_instructions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_lexer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_live_range.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_memory_scheduler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_module.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_module_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_module_group.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_module_metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_module_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_op_metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_opcode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_ordering.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_parser.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_pass_fix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_pass_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_pass_pipeline.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_phi_graph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_profile_printer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_profile_printer_data.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_proto_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_query.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_reachability.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_schedule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_sharding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_sharding_metadata.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_subcomputation_unification.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_value.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/hlo_verifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/human_readable_profile_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/indexed_array_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/instruction_fusion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/layout_assignment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/alias_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/buffer_assignment_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/dynamic_update_slice_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/fused_ir_emitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/ir_array.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/ir_builder_mixin.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/kernel_support_library.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/llvm_loop.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/llvm_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/loop_emitter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/math_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/llvm_ir/tuple_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/local_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/logical_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/logical_buffer_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/logistic_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/map_inliner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/maybe_owning_device_memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/memory_space_assignment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/memory_space_assignment_repacking.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/memory_space_assignment_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/name_uniquer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/op_expander_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/operand_upcaster.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/pattern_matcher.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/platform_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/qr_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/reshape_mover.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/rng_bit_generator_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/rng_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/scatter_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/service_executable_run_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/shape_inference.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/shaped_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/slice_sinker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/slow_operation_alarm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/sort_simplifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/source_map_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/stream_pool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/topk_rewriter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/transfer_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/transpose_folding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/tree_reduction_rewriter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/triangular_solve_expander.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/tuple_points_to_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/tuple_simplifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/tuple_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/while_loop_analysis.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/while_loop_constant_sinking.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/while_loop_invariant_code_motion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/while_loop_simplifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/while_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/xla_debug_info_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service/zero_sized_hlo_elimination.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/service_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/shape.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/shape_layout.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/shape_tree.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/shape_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/status_macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/statusor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/union_find.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/window_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/xla.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/compiler/xla/xla_data.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/allocator_retry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/base_collective_executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/bfc_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/buf_rendezvous.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/build_graph_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/collective_executor_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/collective_param_resolver_local.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/collective_rma_local.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/collective_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/colocation_graph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/composite_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/constant_folding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/copy_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/costmodel_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/debugger_state_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device/device_event_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device/device_host_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device/device_id.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device/device_id_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device/device_id_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device/device_mem_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device_resolver_local.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/device_set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/direct_session.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/dma_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/attr_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/context_distributed_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/copy_to_device_node.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/custom_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/custom_device_op_handler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/eager_executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/eager_op_rewrite_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/eager_operation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/execute.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/execute_node.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/kernel_and_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/placement_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/shape_inference.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/tensor_handle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eager/tensor_handle_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/entry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/eval_const_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/executor_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/function.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/function_body.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/function_def_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/function_optimization_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/function_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_bfc_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_cudamalloc_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_cudamallocasync_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_debug_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_event_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_id.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_id_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_init.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_managed_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_process_state.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu/gpu_virtual_mem_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gpu_device_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/gradients.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/graph_constructor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/graph_def_builder_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/graph_execution_state.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/graph_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/graph_runner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/hierarchical_tree_broadcaster.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/inline_function_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/input_colocation_exemption_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/inspecting_placer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/isolate_placer_inspection_required_ops_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/local_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/local_executor_params.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/lower_case_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/lower_function_call_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/lower_functional_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/lower_if_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/lower_while_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/memory_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/metrics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/mkl_cpu_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/mkl_layout_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/mkl_tfconversion_pass.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/optimization_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/partitioning_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/permuter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/placer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/placer_inspection_required_ops_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/pluggable_device/pluggable_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/pluggable_device/pluggable_device_bfc_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/pluggable_device/pluggable_device_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/pluggable_device/pluggable_device_init.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/pluggable_device/pluggable_device_process_state.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/pluggable_device/pluggable_device_simple_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/pluggable_device/pluggable_device_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/pool_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/process_function_library_runtime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/process_state.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/process_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/profile_handler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/quantize_training.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/renamed_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/rendezvous_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/rendezvous_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/replicate_per_replica_nodes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/ring_alg.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/ring_gatherer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/ring_reducer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/scoped_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/scoped_allocator_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/session_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/shape_refiner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/shared_counter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/single_threaded_cpu_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/stats_publisher_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/step_stats_collector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/common_runtime/threadpool_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/data/compression_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/data/dataset.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/debug/debug_callback_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/debug/debug_graph_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/debug/debug_io_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/debug/debug_node_key.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/debug/debug_service.grpc.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/debug/debug_service.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/debug/debugger_event_metadata.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/debug/debugger_state_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/base_rendezvous_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/call_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/cancellable_call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/cluster_function_library_runtime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/collective_param_resolver_distributed.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/collective_rma_distributed.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/device_resolver_distributed.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/eager/cluster_function_library_runtime.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/eager/destroy_tensor_handle_node.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/eager/eager_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/eager/eager_service_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/eager/remote_copy_node.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/eager/remote_execute_node.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/eager/remote_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/eager/remote_tensor_handle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/eager/remote_tensor_handle_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/graph_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/local_master.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/master.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/master_env.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/master_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/master_session.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/message_wrappers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/partial_run_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/recent_request_ids.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/remote_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rendezvous_mgr_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/request_id.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/async_service_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/eager/grpc_eager_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/eager/grpc_eager_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/eager/grpc_eager_service_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_call.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_channel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_client_cq_tag.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_master_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_master_service_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_remote_worker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_response_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_server_lib.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_state.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_tensor_coding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_worker_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_worker_service.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/grpc_worker_service_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc/rpc_rendezvous_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/rpc_collective_executor_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/scheduler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/server_lib.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/session_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/tensor_coding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/worker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/worker_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/worker_cache_logger.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/worker_cache_partial.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/worker_cache_wrapper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/worker_env.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/worker_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/distributed_runtime/worker_session.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/example/example.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/example/example.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/example/example_parser_configuration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/example/example_parser_configuration.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/example/example_parser_configuration.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/example/feature.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/example/feature.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/example/feature_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/allocation_description.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/allocation_description.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/allocator_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/api_def.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/api_def.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/attr_value.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/attr_value.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/attr_value_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/bfloat16.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/bounds_check.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/cancellation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/collective.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/common_shape_fns.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/control_flow.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/cost_graph.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/cost_graph.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/dataset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/dataset_options.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/dataset_options.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/dataset_stateful_op_allowlist.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/device_attributes.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/device_attributes.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/device_base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/device_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/function.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/function.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/function.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/function_handle_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/graph.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/graph.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/graph_def_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/graph_to_functiondef.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/graph_transfer_info.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/graph_transfer_info.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/kernel_def.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/kernel_def.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/kernel_def_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/kernel_def_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/kernel_shape_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/local_rendezvous.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/log_memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/log_memory.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/log_memory.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/logging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/lookup_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/memory_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/metrics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/model.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/model.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/model.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/node_def.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/node_def.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/node_def_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/node_def_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/node_properties.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/numeric_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/numeric_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/op_def.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/op_def.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/op_def_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/op_def_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/op_gen_lib.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/op_kernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/op_requires.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/op_segment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/ops_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/partial_tensor_shape.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/queue_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/reader_base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/reader_base.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/reader_base.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/reader_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/reader_op_kernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/register_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/register_types_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/registration_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/rendezvous.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/resource_handle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/resource_handle.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/resource_handle.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/resource_mgr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/resource_op_kernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/resource_var.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/rng_alg.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/run_handler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/run_handler_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/selective_registration.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/session_state.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/shape_inference.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/shared_ptr_variant.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/stats_aggregator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/step_stats.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/step_stats.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/summary.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/summary.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_description.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_description.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_key.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_reference.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_shape.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_shape.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_shape.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_slice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_slice.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_slice.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tensor_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/thread_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/tracking_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/type_index.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/type_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/typed_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/types.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/types.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/variable.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/variable.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/variant.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/variant_encode_decode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/variant_op_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/variant_tensor_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/versions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/versions.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/framework/versions.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/algorithm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/collective_order.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/colors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/control_flow.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/costmodel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/default_device.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/edgeset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/graph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/graph_def_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/graph_node_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/graph_partition.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/node_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/optimizer_cse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/subgraph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/tensor_id.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/testlib.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/validate.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/graph/while_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/clusters/cluster.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/clusters/utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/clusters/virtual_cluster.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/analytical_cost_estimator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/cost_estimator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/graph_memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/graph_properties.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/op_context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/op_level_cost_estimator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/op_performance_data.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/op_performance_data.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/virtual_placer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/costs/virtual_scheduler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/devices.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/graph_topology_view.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/graph_view.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/grappler_item.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/grappler_item_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/inputs/utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/mutable_graph_view.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/op_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/arithmetic_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/auto_mixed_precision.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/auto_mixed_precision_lists.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/auto_parallel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/common_subgraph_elimination.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/constant_folding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/custom_graph_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/auto_shard.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/autotune_buffer_sizes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/batch_parallelization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/disable_intra_op_parallelism.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/disable_prefetch_legacy_autotune.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/enable_gradient_descent.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/filter_fusion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/filter_with_random_uniform_fusion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/function_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/fusion_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/graph_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/hoist_random_uniform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/latency_all_edges.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/make_sloppy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/map_and_batch_fusion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/map_and_filter_fusion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/map_fusion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/map_parallelization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/map_vectorization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/meta_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/noop_elimination.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/optimizer_base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/parallel_batch.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/reorder_data_discarding_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/shuffle_and_repeat_fusion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/slack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/use_private_thread_pool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/vectorization/vectorizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/vectorization/vectorizer_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/vectorization/wrapped_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/data/vectorization_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/debug_stripper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/dependency_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/evaluation_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/function_api_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/function_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/generic_layout_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/generic_layout_optimizer_transposer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/generic_layout_optimizer_transposer_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/graph_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/graph_optimizer_stage.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/implementation_selector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/loop_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/memory_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/meta_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/model_pruner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/pin_to_host_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/remapper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/scoped_allocator_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/shape_optimizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/optimizers/static_schedule.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/canonicalizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/colocation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/frame.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/functions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/graph_view.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/graph_view_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/symbolic_shapes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/topological_sort.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/tpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/transitive_fanin.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/utils/traversal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/verifiers/graph_verifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/grappler/verifiers/structure_verifier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/aggregate_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/aggregate_ops_cpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/argmax_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/assign_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/avgpooling_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/batch_norm_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/batching_util/adaptive_shared_batch_scheduler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/batching_util/batch_resource_base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/batching_util/batch_scheduler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/batching_util/concat_split_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/batching_util/periodic_function.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/batching_util/shared_batch_scheduler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/batching_util/threadsafe_status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/betainc_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/bias_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/bias_op_gpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/bincount_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/boosted_trees/boosted_trees.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/boosted_trees/quantiles/quantile_stream_resource.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/boosted_trees/quantiles/weighted_quantiles_buffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/boosted_trees/quantiles/weighted_quantiles_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/boosted_trees/quantiles/weighted_quantiles_summary.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/boosted_trees/resources.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/boosted_trees/tree_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/broadcast_to_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/bucketize_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/cast_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/cast_op_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/collective_nccl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/collective_nccl_broadcaster.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/collective_nccl_gatherer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/collective_nccl_reducer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/compare_and_bitpack_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/concat_lib.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/concat_lib_cpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conditional_accumulator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conditional_accumulator_base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conditional_accumulator_base_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/constant_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/control_flow_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conv_2d.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conv_3d.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conv_grad_input_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conv_grad_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conv_grad_shape_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conv_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conv_ops_fused_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/conv_ops_gpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/cross_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/cudnn_pooling_gpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/cwise_op_clip.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/cwise_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/cwise_ops_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/cwise_ops_gradients.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/batch_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/cache_dataset_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/cache_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/captured_function.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/concatenate_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/dataset_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/dataset_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/assert_cardinality_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/assert_next_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/auto_shard_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/compression_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/directed_interleave_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/io_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/lmdb_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/map_and_batch_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/parallel_interleave_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/random_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/sampling_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/snapshot_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/snapshot_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/sql/driver_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/sql/query_connection.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/sql/sqlite_query_connection.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/threadpool_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/experimental/unique_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/filter_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/finalize_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/fixed_length_record_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/flat_map_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/generator_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/get_options_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/hash_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/interleave_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/iterator_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/map_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/map_defun_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/model_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/name_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/optimize_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/optional_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/options_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/padded_batch_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/parallel_batch_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/parallel_interleave_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/parallel_map_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/prefetch_autotuner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/prefetch_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/random_seed_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/range_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/repeat_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/rewrite_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/serialization_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/shard_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/shuffle_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/single_threaded_executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/skip_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/split_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/stats_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/take_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/tensor_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/tensor_slice_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/text_line_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/tf_record_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/unbounded_thread_pool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/window_dataset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/window_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data/zip_dataset_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/data_format_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/debug_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/deep_conv2d.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/dense_update_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/depthtospace_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/depthwise_conv_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/diag_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/dilation_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/eigen_activations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/eigen_attention.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/eigen_backward_cuboid_convolutions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/eigen_backward_spatial_convolutions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/eigen_contraction_kernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/eigen_convolution_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/eigen_cuboid_convolution.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/eigen_pooling.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/eigen_spatial_convolutions-inl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/eigen_spatial_convolutions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/fake_quant_ops_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/fifo_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/fill_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/fractional_pool_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/function_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/fused_batch_norm_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/fused_eigen_output_kernels.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/gather_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/gather_functor_batched.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/gather_nd_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/gather_nd_op_cpu_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/gemm_functors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/gpu_device_array.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/gpu_device_array_gpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/gpu_prim.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/hinge-loss.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/histogram_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/host_constant_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/identity_n_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/identity_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/adjust_contrast_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/adjust_hsv_gpu.cu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/adjust_hue_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/adjust_saturation_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/colorspace_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/crop_and_resize_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/extract_image_patches_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/extract_volume_patches_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/image_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/mirror_pad_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/mirror_pad_op_cpu_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/non_max_suppression_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/resize_bilinear_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/resize_nearest_neighbor_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/sampling_kernels.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/image/scale_and_translate_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/immutable_constant_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/in_topk_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/initializable_lookup_table.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/inplace_ops_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/l2loss_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/determinant_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/eig_op_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/einsum_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/einsum_op_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/linalg_ops_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/matrix_band_part_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/matrix_diag_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/matrix_set_diag_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/matrix_solve_ls_op_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/matrix_triangular_solve_op_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/qr_op_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/self_adjoint_eig_v2_op_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/linalg/svd_op_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/list_kernels.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/logging_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/logistic-loss.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/lookup_table_init_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/lookup_table_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/lookup_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/loss.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/map_kernels.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/matmul_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/matmul_op_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/maxpooling_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/meta_support.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/mfcc.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/mfcc_dct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/mfcc_mel_filterbank.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/multinomial_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/nextafter_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/no_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/nth_element_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/one_hot_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/ops_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/pad_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/padding_fifo_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/parameterized_truncated_normal_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/partitioned_function_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/poisson-loss.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/pooling_ops_3d.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/pooling_ops_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/population_count_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/priority_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/quantization_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/quantize_and_dequantize_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/queue_base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/queue_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/ragged_tensor_variant.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/random_binomial_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/random_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/random_op_cpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/random_op_gpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/random_ops_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/random_poisson_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/range_sampler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/record_yielder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/reduction_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/reduction_ops_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/reduction_ops_common_gpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/redux_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/reference_gemm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/relu_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/relu_op_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/reshape_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/reshape_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/resource_variable_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/reverse_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/reverse_sequence_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/rnn/blas_gemm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/rnn/gru_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/rnn/lstm_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/roll_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/save_restore_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/scan_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/scatter_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/scatter_nd_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/scatter_nd_op_cpu_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sdca_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/searchsorted_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/segment_reduction_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/segment_reduction_ops_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sendrecv_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/shape_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/slice_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/slice_op_cpu_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/smooth-hinge-loss.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/snapshot_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/softmax_op_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/softplus_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/softsign_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/spacetobatch_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/spacetodepth_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse/kernels.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse/sparse_matrix.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse/transpose_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse/zeros_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse_conditional_accumulator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse_fill_empty_rows_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse_matmul_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse_tensor_dense_add_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse_tensor_dense_matmul_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse_to_dense_op_gpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/sparse_xent_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/special_math/special_math_op_misc_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/spectrogram.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/split_lib.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/squared-loss.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/stack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/stateful_random_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/stateful_random_ops_cpu_gpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/stateless_random_gamma_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/stateless_random_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/stateless_random_ops_v2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/strided_slice_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/strided_slice_op_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/string_to_hash_bucket_fast_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/string_to_hash_bucket_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/string_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/summary_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/tensor_array.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/tensor_list.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/tensor_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/tile_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/tile_functor_cpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/tile_ops_cpu_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/tile_ops_gpu_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/tile_ops_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/topk_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/training_op_helpers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/training_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/transpose_functor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/transpose_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/typed_conditional_accumulator_base.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/typed_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/variable_ops.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/where_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/winograd_transform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/kernels/xent_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/bfloat16/bfloat16.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/arena.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/bitmap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/bits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/blocking_counter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/coding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/error_codes.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/error_codes.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/errors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/notification.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/raw_coding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/refcount.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/status_test_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/stringpiece.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/threadpool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/threadpool_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/core/threadpool_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/db/sqlite.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gif/gif_io.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/array_slice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/cleanup.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/compactptrset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/edit_distance.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/flatmap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/flatrep.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/flatset.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/inlined_vector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/int_type.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/iterator_range.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/manual_constructor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/map_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/optional.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/priority_queue_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/subtle/map_traits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/gtl/top_n.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/hash/crc32c.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/hash/hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/histogram/histogram.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/block.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/block_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/buffered_inputstream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/compression.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/format.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/inputbuffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/inputstream_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/iterator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/path.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/proto_encode_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/random_inputstream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/record_reader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/record_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/snappy/snappy_compression_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/snappy/snappy_inputbuffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/snappy/snappy_inputstream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/snappy/snappy_outputbuffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/table.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/table_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/table_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/two_level_iterator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/zlib_compression_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/zlib_inputstream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/io/zlib_outputbuffer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/jpeg/jpeg_handle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/jpeg/jpeg_mem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/llvm_rtti/llvm_rtti.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/math/math_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/collected_metrics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/collection_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/counter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/gauge.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/metric_def.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/mobile_counter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/mobile_gauge.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/mobile_percentile_sampler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/mobile_sampler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/percentile_sampler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/sampler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/timed.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/monitoring/types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/png/png_io.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/random/distribution_sampler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/random/exact_uniform_int.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/random/philox_random.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/random/philox_random_test_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/random/random.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/random/random_distributions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/random/simple_philox.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/random/weighted_picker.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/strings/base64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/strings/numbers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/strings/ordered_code.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/strings/proto_serialization.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/strings/proto_text_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/strings/scanner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/strings/str_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/strings/strcat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/strings/stringprintf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/lib/wav/wav_io.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/nccl/collective_communicator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/abi.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/base64.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/bfloat16.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/blocking_counter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/byte_order.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/casts.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/auth_provider.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/compute_engine_metadata_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/compute_engine_zone_provider.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/curl_http_request.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/expiring_lru_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/file_block_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/gcs_dns_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/gcs_file_system.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/gcs_throttle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/google_auth_provider.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/http_request.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/oauth_client.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/ram_file_block_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/time_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cloud/zone_provider.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/coding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cord.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cpu_feature_guard.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cpu_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/ctstring.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/ctstring_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cuda.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/cuda_libdevice_path.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/casts.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/context.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/cord.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/dynamic_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/integral_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/logging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/mutex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/mutex_data.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/notification.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/posix_file_system.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/stacktrace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/subprocess.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/test_benchmark.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/tracing_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/default/unbounded_work_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/demangle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/denormal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/dynamic_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/enable_tf2_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/env.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/env_time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/error.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/errors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/file_statistics.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/file_system.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/file_system_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/fingerprint.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/gif.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/hadoop/hadoop_file_system.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/host_info.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/human_readable_json.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/init_main.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/jpeg.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/load_library.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/logger.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/logging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/mem.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/mutex.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/net.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/notification.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/null_file_system.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/numa.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/numbers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/path.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/platform_strings.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/platform_strings_computed.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/png.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/prefetch.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/profile_utils/android_armv7a_cpu_utils_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/profile_utils/clock_cycle_profiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/profile_utils/cpu_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/profile_utils/i_cpu_utils_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/protobuf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/protobuf_compiler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/protobuf_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/ram_file_system.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/random.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/raw_coding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/refcount.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/regexp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/resource.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/resource_loader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/retrying_file_system.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/retrying_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/rocm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/rocm_rocdl_path.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/s3/aws_crypto.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/s3/aws_logging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/s3/s3_file_system.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/scanner.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/setround.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/snappy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/stack_frame.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/stacktrace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/stacktrace_handler.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/str_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/strcat.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/stream_executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/stream_executor_no_cuda.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/stringpiece.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/stringprintf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/strong_hash.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/subprocess.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/tensor_coding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/tensor_float_32_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/test.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/test_benchmark.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/thread_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/threadpool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/threadpool_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/threadpool_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/tracing.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/tstring.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/platform/unbounded_work_queue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/internal/cpu/annotation_stack.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/internal/cpu/host_tracer_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/internal/cpu/traceme_recorder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/lib/annotated_traceme.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/lib/connected_traceme.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/lib/profiler_factory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/lib/profiler_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/lib/profiler_session.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/lib/scoped_annotation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/lib/traceme.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/lib/traceme_encode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/profile.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/profiler_analysis.grpc.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/profiler_analysis.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/profiler_options.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/profiler_options.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/profiler_service.grpc.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/profiler_service.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/profiler_service_monitor_result.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/protobuf/xplane.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/protobuf/xplane.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/rpc/profiler_service_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/tfprof_log.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/tfprof_options.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/tfprof_output.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/utils/file_system_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/utils/parse_annotation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/utils/tf_op_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/utils/time_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/utils/timespan.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/utils/trace_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/utils/xplane_builder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/utils/xplane_schema.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/utils/xplane_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/profiler/utils/xplane_visitor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/autotuning.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/bfc_memory_map.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/bfc_memory_map.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/cluster.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/cluster.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/config.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/config.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/control_flow.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/control_flow.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/debug.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/debug.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/debug_event.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/debug_event.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/device_filters.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/device_filters.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/device_properties.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/device_properties.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/eager_service.grpc.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/eager_service.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/error_codes.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/error_codes.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/extension_type_variant.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/extension_type_variant.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/graph_debug_info.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/graph_debug_info.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/master.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/meta_graph.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/meta_graph.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/named_tensor.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/named_tensor.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/queue_runner.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/queue_runner.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/remote_tensor_handle.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/remote_tensor_handle.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/replay_log.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/rewriter_config.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/rewriter_config.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/saved_model.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/saved_model.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/saved_object_graph.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/saved_object_graph.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/saver.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/saver.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/service_config.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/service_config.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/snapshot.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/snapshot.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/struct.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/struct.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/tensor_bundle.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/tensor_bundle.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/tensorflow_server.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/tensorflow_server.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/tpu/compile_metadata.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/tpu/dynamic_padding.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/tpu/optimization_parameters.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/tpu/topology.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/tpu/tpu_embedding_configuration.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/tpu/tpu_embedding_output_layout.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/trackable_object_graph.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/trackable_object_graph.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/transport_options.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/transport_options.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/verifier_config.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/verifier_config.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/protobuf/worker.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/public/session.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/public/session_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/public/version.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/summary/schema.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/summary/summary_converter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/summary/summary_db_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/summary/summary_file_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/tpu/libtftpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/tpu/tpu_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/tpu/tpu_defs.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/tpu/tpu_embedding_optimization_parameters_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/tpu/tpu_embedding_output_layout_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/tpu/tpu_executor_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/tpu/tpu_ops_c_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/activation_mode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/batch_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/bcast.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/command_line_flags.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/ctc/ctc_beam_entry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/ctc/ctc_beam_scorer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/ctc/ctc_beam_search.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/ctc/ctc_decoder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/ctc/ctc_loss_calculator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/ctc/ctc_loss_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/debug_events_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/device_name_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/dump_graph.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/einsum_op_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/env_var.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/equal_graph_def.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/event.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/event.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/events_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/example_proto_fast_parsing.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/example_proto_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/exec_on_stall.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/gpu_cuda_alias.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/gpu_device_functions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/gpu_kernel_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/gpu_launch_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/guarded_philox_random.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/image_resizer_state.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/incremental_barrier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/managed_stack_trace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/matmul_autotune.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/matmul_bcast.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/memmapped_file_system.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/memmapped_file_system.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/memmapped_file_system.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/memmapped_file_system_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/mirror_pad_mode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/mkl_threadpool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/mkl_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/overflow.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/padding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/permutation_input_iterator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/permutation_output_iterator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/port.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/presized_cuckoo_map.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/proto/decode.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/proto/descriptor_pool_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/proto/descriptors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/proto/proto_utils.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/ptr_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/ragged_to_dense_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/ragged_to_dense_util_common.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/reffed_status_callback.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/saved_tensor_slice.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/saved_tensor_slice.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/saved_tensor_slice_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/sparse/dim_comparator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/sparse/group_iterator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/sparse/sparse_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/stat_summarizer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/stat_summarizer_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/stats_calculator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/stream_executor_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/strided_slice_op.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/tensor_bundle/byte_swap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/tensor_bundle/naming.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/tensor_bundle/tensor_bundle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/tensor_format.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/tensor_ops_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/tensor_slice_reader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/tensor_slice_reader_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/tensor_slice_set.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/tensor_slice_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/tensor_slice_writer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/test_log.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/test_log.proto" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/transform_output_iterator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/use_cudnn.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/work_sharder.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/core/util/xla_config_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/include/external/eigen_archive/COPYING.MPL2" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/client/session_ref.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/client/tf_session_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/eager/pywrap_gradient_exclusions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/eager/pywrap_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/eager/pywrap_tensor_conversion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/eager/pywrap_tfe.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/framework/python_op_gen.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/framework/python_op_gen_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/grappler/model_analyzer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/bfloat16.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/ndarray_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/ndarray_tensor_bridge.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/numpy.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/py_exception_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/py_func.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/py_seq_tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/py_util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/pybind11_absl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/pybind11_lib.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/pybind11_proto.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/pybind11_status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/safe_ptr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/core/safe_pyobject_ptr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/lib/io/py_record_reader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/util/kernel_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/util/stack_trace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/python/util/util.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/allocator_stats.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/blas.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/cuda/cuda_dnn.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/cuda/cuda_platform_id.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/data_type.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/device_description.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/device_memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/device_memory_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/device_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/dnn.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/dnn.pb.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/event.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/executor_cache.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/fft.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/gpu_launch_dim.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/host/host_gpu_executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/host/host_platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/host/host_platform_id.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/host/host_stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/host/host_timer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/host_or_device_scalar.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/kernel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/kernel_cache_config.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/kernel_spec.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/launch_dim.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/array_slice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/demangle.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/env.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/error.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/human_readable.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/initialize.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/mathutil.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/numbers.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/path.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/process_state.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/stacktrace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/static_threadlocal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/status.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/status_macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/statusor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/statusor_internals.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/thread_options.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/lib/threadpool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/module_spec.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/multi_platform_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/platform/default/dso_loader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/platform/default/initialize.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/platform/dso_loader.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/platform/initialize.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/platform/logging.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/platform/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/platform/port.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/plugin.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/plugin_registry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/rng.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/rocm/rocm_platform_id.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/scratch_allocator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/stream.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/stream_executor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/stream_executor_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/stream_executor_pimpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/temporary_device_memory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/temporary_memory_manager.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/tf_allocator_adapter.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/timer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/tpu/c_api_conversions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/tpu/c_api_decl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/tpu/c_api_defn.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/tpu/proto_helper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/tpu/tpu_executor_c_api.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/tpu/tpu_platform_interface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/tpu/tpu_topology.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/tensorflow/stream_executor/trace_listener.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/Eigen/Cholesky" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/Eigen/Core" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/Eigen/Eigenvalues" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/Eigen/LU" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/Eigen/OrderingMethods" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/Eigen/QR" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/Eigen/SVD" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/Eigen/SparseCholesky" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/Eigen/SparseCore" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/LICENSE" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/FixedPoint" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/Tensor" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/ThreadPool" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/src/FixedPoint/FixedPointTypes.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/src/FixedPoint/MatMatProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/src/FixedPoint/MatMatProductAVX2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/src/FixedPoint/MatMatProductNEON.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/src/FixedPoint/MatVecProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/src/FixedPoint/PacketMathAVX.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/src/FixedPoint/PacketMathAVX2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/src/FixedPoint/PacketMathAVX512.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/src/FixedPoint/TypeCastingAVX2.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/CXX11/src/FixedPoint/TypeCastingAVX512.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/MatrixFunctions" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/third_party/eigen3/unsupported/Eigen/SpecialFunctions" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/CMakeLists.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/Tensor" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/TensorSymmetry" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/ThreadPool" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/README.md" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/Tensor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorArgMax.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorAssign.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorBase.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorBlock.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorBroadcasting.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorChipping.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorConcatenation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorContraction.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorContractionBlocking.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorContractionCuda.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorContractionGpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorContractionMapper.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorContractionSycl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorContractionThreadPool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorConversion.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorConvolution.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorConvolutionSycl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorCostModel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorCustomOp.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorDevice.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorDeviceCuda.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorDeviceDefault.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorDeviceGpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorDeviceSycl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorDeviceThreadPool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorDimensionList.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorDimensions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorEvalTo.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorEvaluator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorExecutor.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorExpr.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorFFT.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorFixedSize.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorForcedEval.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorForwardDeclarations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorFunctors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorGenerator.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorGlobalFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorGpuHipCudaDefines.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorGpuHipCudaUndefines.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorIO.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorImagePatch.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorIndexList.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorInflation.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorInitializer.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorIntDiv.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorLayoutSwap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorMacros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorMap.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorMeta.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorMorphing.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorPadding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorPatch.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorRandom.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorReduction.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorReductionCuda.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorReductionGpu.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorReductionSycl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorRef.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorReverse.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorScan.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorScanSycl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorShuffling.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorStorage.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorStriding.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorTrace.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorTraits.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorUInt128.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/Tensor/TensorVolumePatch.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/TensorSymmetry/DynamicSymmetry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/TensorSymmetry/StaticSymmetry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/TensorSymmetry/Symmetry.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/TensorSymmetry/util/TemplateGroupTheory.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/ThreadPool/Barrier.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/ThreadPool/EventCount.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/ThreadPool/NonBlockingThreadPool.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/ThreadPool/RunQueue.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/ThreadPool/ThreadCancel.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/ThreadPool/ThreadEnvironment.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/ThreadPool/ThreadLocal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/ThreadPool/ThreadPoolInterface.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/ThreadPool/ThreadYield.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/util/CXX11Meta.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/util/CXX11Workarounds.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/util/EmulateArray.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/CXX11/src/util/MaxSizeVector.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/FFT" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/KroneckerProduct" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/MatrixFunctions" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/SpecialFunctions" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/FFT/ei_fftw_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/FFT/ei_kissfft_impl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/KroneckerProduct/KroneckerTensorProduct.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/MatrixFunctions/MatrixExponential.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/MatrixFunctions/MatrixFunction.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/MatrixFunctions/MatrixLogarithm.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/MatrixFunctions/MatrixPower.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/MatrixFunctions/MatrixSquareRoot.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/MatrixFunctions/StemFunction.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/BesselFunctionsArrayAPI.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/BesselFunctionsBFloat16.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/BesselFunctionsFunctors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/BesselFunctionsHalf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/BesselFunctionsImpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/BesselFunctionsPacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/HipVectorCompatibility.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/SpecialFunctionsArrayAPI.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/SpecialFunctionsBFloat16.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/SpecialFunctionsFunctors.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/SpecialFunctionsHalf.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/SpecialFunctionsImpl.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/SpecialFunctionsPacketMath.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/arch/AVX/BesselFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/arch/AVX/SpecialFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/arch/AVX512/BesselFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/arch/AVX512/SpecialFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/arch/GPU/SpecialFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/arch/NEON/BesselFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/include/unsupported/Eigen/src/SpecialFunctions/arch/NEON/SpecialFunctions.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/activations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/densenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/efficientnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/imagenet_utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/inception_resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/inception_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/mobilenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/mobilenet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/mobilenet_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/nasnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/resnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/resnet50/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/vgg16/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/vgg19/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/applications/xception/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/backend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/callbacks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/callbacks/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/constraints/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/datasets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/datasets/boston_housing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/datasets/cifar10/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/datasets/cifar100/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/datasets/fashion_mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/datasets/imdb/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/datasets/mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/datasets/reuters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/initializers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/layers/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/layers/experimental/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/losses/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/metrics/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/mixed_precision/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/models/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/optimizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/optimizers/schedules/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/premade/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/preprocessing/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/preprocessing/sequence/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/preprocessing/text/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/regularizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/utils/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/wrappers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/keras/wrappers/scikit_learn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/libtensorflow_framework.2.5.0.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/libtensorflow_framework.2.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/libtensorflow_framework.dylib" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/examples/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/examples/lstm/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/examples/lstm/rnn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/examples/lstm/rnn_cell.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/microfrontend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/microfrontend/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/microfrontend/ops/gen_audio_microfrontend_op.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/microfrontend/python/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/microfrontend/python/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/microfrontend/python/ops/_audio_microfrontend_op.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/microfrontend/python/ops/audio_microfrontend_op.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/tensorboard/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/experimental/tensorboard/ops_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/convert.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/convert_saved_model.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/interpreter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/interpreter_wrapper/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/interpreter_wrapper/_pywrap_tensorflow_interpreter_wrapper.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/lite.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/lite_constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/metrics_interface.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/metrics_portable.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/op_hint.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/optimize/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/optimize/_pywrap_tensorflow_lite_calibration_wrapper.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/optimize/calibrator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/schema_py_generated.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/schema_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/tflite_convert.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/tflite_keras_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/python/wrap_toco.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/toco/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/toco/logging/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/toco/logging/gen_html.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/toco/logging/toco_conversion_log_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/toco/model_flags_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/toco/python/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/toco/python/toco_from_protos.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/toco/toco_flags_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/toco/types_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/tools/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/lite/tools/visualize.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_dtypes.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_errors_test_helper.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_op_def_registry.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_op_def_util.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_proto_comparators.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_python_memory_checker_helper.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_debug_events_writer.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_device_lib.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_events_writer.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_mlir.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_parallel_device.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_py_exception_registry.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_python_api_dispatcher.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_python_api_info.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_python_api_parameter_converter.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_python_op_gen.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_quantize_training.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_tensorflow_internal.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_tf_cluster.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_tf_item.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_tf_optimizer.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_tf_session.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_tfcompile.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_tfe.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/_pywrap_toco_api.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/asserts.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/break_statements.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/call_trees.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/conditional_expressions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/continue_statements.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/control_flow.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/control_flow_deprecated_py2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/directives.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/functions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/list_comprehensions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/lists.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/logical_expressions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/return_statements.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/slices.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/converters/variables.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/core/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/core/ag_ctx.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/core/config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/core/config_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/core/converter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/core/converter_testing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/core/function_wrappers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/core/unsupported_features_checker.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/impl/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/impl/api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/impl/conversion.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/impl/testing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/impl/testing/pybind_for_testing.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/lang/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/lang/directives.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/lang/special_functions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/operators/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/operators/conditional_expressions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/operators/control_flow.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/operators/data_structures.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/operators/exceptions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/operators/logical.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/operators/py_builtins.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/operators/slices.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/operators/variables.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/anno.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/ast_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/cache.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/cfg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/common_transformers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/common_transformers/anf.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/error_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/gast_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/inspect_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/loader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/naming.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/origin_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/parser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/pretty_printer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/qual_names.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/static_analysis/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/static_analysis/activity.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/static_analysis/annos.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/static_analysis/liveness.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/static_analysis/reaching_definitions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/static_analysis/reaching_fndefs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/static_analysis/type_inference.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/templates.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/testing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/testing/basic_definitions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/testing/decorators.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/transformer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/pyct/transpiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/utils/ag_logging.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/utils/context_managers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/utils/misc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/utils/py_func.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/utils/tensor_list.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/utils/tensors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/autograph/utils/testing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/client/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/client/client_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/client/device_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/client/pywrap_tf_session.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/client/session.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/client/timeline.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compat/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compat/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compat/v2_compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/mlir/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/mlir/mlir.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/tensorrt/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/tensorrt/test/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/tensorrt/test/tf_trt_integration_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/tensorrt/trt_convert.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/tensorrt/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/xla/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/xla/jit.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/compiler/xla/xla.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/benchmarks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/benchmarks/benchmark_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/kernel_tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/kernel_tests/data_service_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/batching.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/cardinality.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/compression_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/counter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/data_service_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/distribute.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/distribute_options.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/enumerate_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/error_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/get_single_element.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/grouping.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/interleave_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/io.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/iterator_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/map_defun.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/matching_files.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/optimization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/optimization_options.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/parsing_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/prefetching_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/random_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/readers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/resampling.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/scan_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/shuffle_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/snapshot.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/stats_aggregator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/stats_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/stats_options.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/take_while_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/testing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/threading_options.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/threadpool.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/unique.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/ops/writers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/service/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/service/_pywrap_server_lib.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/service/_pywrap_utils.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/experimental/service/server_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/kernel_tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/kernel_tests/checkpoint_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/kernel_tests/test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/kernel_tests/tf_record_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/ops/dataset_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/ops/iterator_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/ops/multi_device_iterator_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/ops/optional_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/ops/readers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/util/convert.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/util/nest.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/util/options.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/util/random_seed.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/util/sparse.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/util/structure.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/data/util/traverse.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/analyzer_cli.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/base_ui.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/cli_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/cli_shared.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/cli_test_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/command_parser.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/curses_ui.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/curses_widgets.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/debugger_cli_common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/evaluator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/offline_analyzer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/profile_analyzer_cli.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/readline_ui.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/tensor_format.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/cli/ui_factory.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/examples/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/examples/debug_mnist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/examples/v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/examples/v1/debug_errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/examples/v1/debug_fibonacci.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/examples/v1/debug_keras.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/examples/v1/debug_mnist_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/examples/v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/examples/v2/debug_fibonacci_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/examples/v2/debug_mnist_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/check_numerics_callback.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/debug_data.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/debug_events_monitors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/debug_events_reader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/debug_events_writer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/debug_gradients.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/debug_graphs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/debug_service_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/debug_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/dumping_callback.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/dumping_callback_test_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/grpc_debug_server.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/grpc_debug_test_server.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/grpc_tensorflow_server.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/op_callbacks_common.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/profiling.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/session_debug_testlib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/source_remote.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/lib/source_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/wrappers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/wrappers/dumping_wrapper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/wrappers/framework.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/wrappers/grpc_wrapper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/wrappers/hooks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/debug/wrappers/local_cli_wrapper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/central_storage_strategy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cluster_resolver/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cluster_resolver/cluster_resolver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cluster_resolver/gce_cluster_resolver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cluster_resolver/kubernetes_cluster_resolver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cluster_resolver/sagemaker_cluster_resolver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cluster_resolver/slurm_cluster_resolver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cluster_resolver/tfconfig_cluster_resolver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cluster_resolver/tpu/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cluster_resolver/tpu/tpu_cluster_resolver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cluster_resolver/tpu_cluster_resolver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/collective_all_reduce_strategy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/collective_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/coordinator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/coordinator/cluster_coordinator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/coordinator/metric_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/coordinator/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cross_device_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/cross_device_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/device_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_coordinator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_coordinator_context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/distribution_strategy_context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/estimator_training.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/input_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/input_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/mirrored_run.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/mirrored_strategy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/multi_process_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/multi_process_runner.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/multi_worker_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/multi_worker_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/numpy_dataset.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/one_device_strategy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/packed_distributed_variable.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/parallel_device/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/parallel_device/parallel_device.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/parallel_device/saving.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/parameter_server_strategy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/parameter_server_strategy_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/ps_values.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/reduce_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/sharded_variable.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/shared_variable_creator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/single_loss_example.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/step_fn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/strategy_combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/strategy_test_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/summary_op_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/tpu_strategy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/tpu_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/tpu_values.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/v1/all_reduce.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/values.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/distribute/values_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/dlpack/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/dlpack/dlpack.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/backprop.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/backprop_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/benchmarks_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/cancellation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/execute.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/executor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/forwardprop.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/forwardprop_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/function.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/graph_only_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/imperative_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/lift_to_graph.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/memory_tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/memory_tests/memory_test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/monitoring.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/profiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/profiler_client.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/remote.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/tape.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/eager/wrap_function.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/baseline.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/boosted_trees.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/boosted_trees_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/dnn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/dnn_linear_combined.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/head.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/linear.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/metric_keys.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/optimizers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/parsing_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/canned/prediction_keys.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/estimator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/estimator_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/export/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/export/export.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/export/export_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/export/export_output.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/exporter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/gc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/inputs/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/inputs/inputs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/inputs/numpy_io.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/inputs/pandas_io.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/inputs/queues/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/inputs/queues/feeding_functions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/inputs/queues/feeding_queue_runner.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/keras.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/model_fn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/run_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/training.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/estimator/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/feature_column/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/feature_column/feature_column.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/feature_column/feature_column_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/feature_column/feature_column_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/feature_column/sequence_feature_column.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/feature_column/serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/feature_column/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/auto_control_deps.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/auto_control_deps_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/c_api_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/common_shapes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/composite_tensor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/constant_op.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/convert_to_constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/cpp_shape_inference_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/device.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/device_spec.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/dtypes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/error_interpolation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/errors.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/errors_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/fast_tensor_util.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/framework_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/function.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/function_def_to_graph.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/gpu_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/graph_io.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/graph_to_function_def.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/graph_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/graph_util_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/importer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/indexed_slices.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/kernels.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/load_library.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/memory_checker.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/meta_graph.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/op_callbacks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/op_def_library.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/op_def_registry.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/python_memory_checker.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/random_seed.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/registry.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/smart_cond.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/sparse_tensor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/subscribe.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/tensor_conversion_registry.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/tensor_shape.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/tensor_spec.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/tensor_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/test_combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/test_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/tfrt_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/traceable_stack.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/type_spec.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/framework/versions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/grappler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/grappler/cluster.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/grappler/item.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/grappler/tf_optimizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/activations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/activations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/densenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/efficientnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/imagenet_utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/inception_resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/inception_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/mobilenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/mobilenet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/mobilenet_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/nasnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/resnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/resnet50/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/vgg16/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/vgg19/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/applications/xception/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/backend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/callbacks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/callbacks/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/constraints/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/datasets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/datasets/boston_housing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/datasets/cifar10/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/datasets/cifar100/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/datasets/fashion_mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/datasets/imdb/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/datasets/mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/datasets/reuters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/initializers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/layers/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/layers/experimental/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/losses/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/metrics/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/mixed_precision/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/models/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/optimizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/optimizers/schedules/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/premade/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/preprocessing/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/preprocessing/sequence/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/preprocessing/text/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/regularizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/wrappers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v1/keras/wrappers/scikit_learn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/activations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/densenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/efficientnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/imagenet_utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/inception_resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/inception_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/mobilenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/mobilenet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/mobilenet_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/nasnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/resnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/resnet50/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/vgg16/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/vgg19/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/applications/xception/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/backend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/callbacks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/callbacks/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/constraints/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/datasets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/datasets/boston_housing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/datasets/cifar10/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/datasets/cifar100/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/datasets/fashion_mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/datasets/imdb/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/datasets/mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/datasets/reuters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/initializers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/layers/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/layers/experimental/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/losses/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/metrics/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/mixed_precision/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/models/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/optimizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/optimizers/schedules/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/premade/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/preprocessing/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/preprocessing/sequence/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/preprocessing/text/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/regularizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/utils/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/wrappers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/_v2/keras/wrappers/scikit_learn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/activations/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/densenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/efficientnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/imagenet_utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/inception_resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/inception_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/mobilenet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/mobilenet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/mobilenet_v3/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/nasnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/resnet/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/resnet50/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/resnet_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/vgg16/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/vgg19/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/applications/xception/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/backend/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/callbacks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/callbacks/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/constraints/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/datasets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/datasets/boston_housing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/datasets/cifar10/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/datasets/cifar100/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/datasets/fashion_mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/datasets/imdb/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/datasets/mnist/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/datasets/reuters/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/initializers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/layers/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/layers/experimental/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/losses/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/metrics/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/mixed_precision/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/models/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/optimizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/optimizers/schedules/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/premade/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/preprocessing/image/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/preprocessing/sequence/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/preprocessing/text/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/regularizers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/wrappers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/api/keras/wrappers/scikit_learn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/densenet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/efficientnet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/imagenet_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/inception_resnet_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/inception_v3.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/mobilenet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/mobilenet_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/mobilenet_v3.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/nasnet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/resnet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/resnet_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/vgg16.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/vgg19.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/applications/xception.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/backend.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/backend_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/benchmarks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/benchmarks/benchmark_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/benchmarks/distribution_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/benchmarks/saved_model_benchmarks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/benchmarks/saved_model_benchmarks/saved_model_benchmark_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/callbacks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/callbacks_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/constraints.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/datasets/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/datasets/boston_housing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/datasets/cifar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/datasets/cifar10.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/datasets/cifar100.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/datasets/fashion_mnist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/datasets/imdb.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/datasets/mnist.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/datasets/reuters.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/distribute_strategy_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/distributed_file_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/distributed_training_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/distributed_training_utils_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/keras_correctness_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/keras_dnn_correctness_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/keras_embedding_model_correctness_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/keras_image_model_correctness_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/keras_rnn_model_correctness_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/keras_stateful_lstm_model_correctness_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/keras_utils_test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/model_collection_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/model_combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/multi_worker_testing_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/optimizer_combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/saved_model_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/sidecar_evaluator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/simple_models.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/strategy_combinations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/test_example.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/distribute/worker_training_state.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_preprocessing_layer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/compile_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/data_adapter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/functional.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/input_layer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/input_spec.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/keras_tensor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/node.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/partial_batch_padding_handler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/saving.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/sequential.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/training_arrays_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/training_distributed_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/training_eager_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/training_generator_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/training_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/training_utils_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/engine/training_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/feature_column/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/feature_column/base_feature_layer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/feature_column/dense_features.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/feature_column/dense_features_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/feature_column/sequence_feature_column.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/initializers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/initializers/initializers_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/initializers/initializers_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/keras_parameterized.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/advanced_activations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/convolutional.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/convolutional_recurrent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/cudnn_recurrent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/dense_attention.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/einsum_dense.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/embeddings.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/kernelized.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/legacy_rnn/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/legacy_rnn/rnn_cell_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/legacy_rnn/rnn_cell_wrapper_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/local.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/merge.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/multi_head_attention.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/noise.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/normalization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/normalization_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/pooling.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/benchmarks/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/benchmarks/feature_column_benchmark.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/category_crossing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/category_encoding.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/discretization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/hashing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/image_preprocessing.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/index_lookup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/integer_lookup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/normalization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/preprocessing_stage.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/preprocessing_test_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/reduction.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/string_lookup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/table_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/preprocessing/text_vectorization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/recurrent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/recurrent_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/rnn_cell_wrapper_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/layers/wrappers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/legacy_tf_layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/legacy_tf_layers/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/legacy_tf_layers/convolutional.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/legacy_tf_layers/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/legacy_tf_layers/normalization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/legacy_tf_layers/pooling.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/losses.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/metrics.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/mixed_precision/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/mixed_precision/autocast_variable.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/mixed_precision/device_compatibility_check.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/mixed_precision/get_layer_policy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/mixed_precision/loss_scale.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/mixed_precision/loss_scale_optimizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/mixed_precision/policy.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/mixed_precision/test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/models.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/adadelta.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/adagrad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/adam.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/adamax.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/ftrl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/gradient_descent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/learning_rate_schedule.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/legacy_learning_rate_decay.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/nadam.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/rmsprop.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizer_v2/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/optimizers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/premade/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/premade/linear.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/premade/wide_deep.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/preprocessing/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/preprocessing/dataset_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/preprocessing/image.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/preprocessing/image_dataset.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/preprocessing/sequence.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/preprocessing/text.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/preprocessing/text_dataset.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/preprocessing/timeseries.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/protobuf/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/protobuf/projector_config_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/protobuf/saved_metadata_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/protobuf/versions_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/regularizers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/hdf5_format.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/model_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/save.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/base_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/json_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/layer_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/load.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/load_context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/metric_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/model_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/network_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/save.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/save_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/serialized_attributes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saved_model_experimental.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/saving_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/utils_v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/utils_v1/export_output.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/utils_v1/export_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/utils_v1/mode_keys.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/utils_v1/signature_def_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/saving/utils_v1/unexported_constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/testing_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/tests/model_architectures.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/tests/model_subclassing_test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/all_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/control_flow_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/conv_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/data_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/dataset_creator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/generic_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/io_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/kernelized_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/kpl_test_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/layer_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/losses_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/metrics_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/mode_keys.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/multi_gpu_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/np_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/object_identity.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/tf_contextlib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/tf_inspect.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/tf_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/version_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/utils/vis_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/wrappers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/keras/wrappers/scikit_learn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/kernel_tests/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/kernel_tests/bias_op_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/kernel_tests/cudnn_deterministic_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/kernel_tests/random/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/kernel_tests/random/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/kernel_tests/signal/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/kernel_tests/signal/test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/layers/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/layers/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/layers/convolutional.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/layers/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/layers/layers.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/layers/normalization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/layers/pooling.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/layers/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/lib/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/lib/core/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/lib/core/_pywrap_bfloat16.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/lib/core/_pywrap_py_func.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/lib/io/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/lib/io/_pywrap_file_io.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/lib/io/_pywrap_record_io.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/lib/io/file_io.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/lib/io/python_io.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/lib/io/tf_record.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/module/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/module/module.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/array_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/array_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/batch_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/bincount_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/bitwise_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/boosted_trees_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/candidate_sampling_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/check_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/clip_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/clustering_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/collective_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/cond_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/confusion_matrix.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_state.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_util_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_v2_func_graphs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_v2_toggles.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/critical_section_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ctc_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/cudnn_rnn_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/custom_gradient.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/data_flow_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/data_flow_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/default_gradient.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/bernoulli.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/beta.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/bijector.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/bijector_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/bijector_test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/categorical.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/dirichlet.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/dirichlet_multinomial.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/distribution.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/distributions.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/exponential.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/gamma.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/identity_bijector.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/kullback_leibler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/laplace.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/multinomial.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/normal.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/special_math.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/student_t.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/transformed_distribution.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/uniform.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/distributions/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/embedding_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/functional_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_array_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_audio_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_batch_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_bitwise_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_boosted_trees_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_candidate_sampling_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_checkpoint_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_clustering_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_collective_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_control_flow_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_count_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_ctc_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_cudnn_rnn_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_data_flow_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_dataset_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_debug_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_decode_proto_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_encode_proto_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_experimental_dataset_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_functional_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_image_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_io_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_linalg_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_list_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_logging_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_lookup_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_manip_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_map_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_math_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_nccl_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_nn_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_parsing_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_ragged_array_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_ragged_conversion_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_ragged_math_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_random_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_resource_variable_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_rnn_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_script_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_sdca_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_sendrecv_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_set_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_sparse_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_special_math_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_spectral_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_state_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_stateful_random_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_stateless_random_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_stateless_random_ops_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_string_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_summary_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_tpu_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_training_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gen_user_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gradient_checker.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gradient_checker_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gradients.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gradients_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/gradients_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/handle_data_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/histogram_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/image_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/image_grad_test_base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/image_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/image_ops_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/init_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/init_ops_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/initializers_ns.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/inplace_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/io_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/adjoint_registrations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/cholesky_registrations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/inverse_registrations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linalg.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linalg_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_addition.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_adjoint.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_algebra.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_block_diag.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_block_lower_triangular.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_circulant.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_composition.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_diag.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_full_matrix.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_householder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_identity.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_inversion.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_kronecker.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_low_rank_update.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_lower_triangular.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_permutation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_toeplitz.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_tridiag.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/linear_operator_zeros.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/matmul_registrations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/registrations_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/solve_registrations.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/sparse/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/sparse/conjugate_gradient.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/sparse/gen_sparse_csr_matrix_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/sparse/sparse.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/sparse/sparse_csr_matrix_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg/sparse/sparse_csr_matrix_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/linalg_ops_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/list_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/logging_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/lookup_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/losses/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/losses/losses.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/losses/losses_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/losses/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/manip_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/manip_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/map_fn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/map_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/math_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/math_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/metrics.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/metrics_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/nccl_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/nn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/nn_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/nn_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/nn_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/numerics.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/numpy_ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/numpy_ops/np_array_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/numpy_ops/np_arrays.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/numpy_ops/np_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/numpy_ops/np_dtypes.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/numpy_ops/np_export.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/numpy_ops/np_math_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/numpy_ops/np_random.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/numpy_ops/np_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/op_selector.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/optional_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/parallel_for/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/parallel_for/control_flow_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/parallel_for/gradients.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/parallel_for/pfor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/parallel_for/test_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/parsing_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/parsing_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/partitioned_variables.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/proto_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_array_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_batch_gather_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_batch_gather_with_default_op.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_concat_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_conversion_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_dispatch.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_factory_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_functional_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_gather_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_getitem.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_map_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_math_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_operators.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_squeeze_op.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_string_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_tensor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_tensor_shape.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_tensor_test_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_tensor_value.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/ragged_where_op.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/row_partition.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/ragged/segment_id_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/random_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/random_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/resource_variable_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/resources.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/rnn.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/rnn_cell.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/rnn_cell_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/rnn_cell_wrapper_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/rnn_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/script_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/sdca_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/session_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/sets.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/sets_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/dct_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/fft_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/mel_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/mfcc_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/reconstruction_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/shape_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/signal.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/spectral_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/util_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/signal/window_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/sort_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/sparse_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/sparse_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/special_math_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/standard_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/state_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/state_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/stateful_random_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/stateless_random_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/string_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/structured/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/structured/structured_array_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/structured/structured_tensor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/summary_op_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/summary_ops_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/template.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/tensor_array_grad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/tensor_array_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/unconnected_gradients.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/variable_scope.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/variables.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/weights_broadcast_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/while_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/ops/while_v2_indexed_slices_rewriter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/_pywrap_stacktrace_handler.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/_pywrap_tf2.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/analytics.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/app.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/benchmark.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/build_info.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/control_imports.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/device_context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/flags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/gfile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/googletest.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/parameterized.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/remote_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/resource_loader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/self_check.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/status_bar.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/sysconfig.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/test.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/platform/tf_logging.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/internal/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/internal/_pywrap_profiler.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/internal/_pywrap_traceme.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/internal/flops_registry.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/model_analyzer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/option_builder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/profiler.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/profiler_client.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/profiler_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/tfprof_logger.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/trace.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/profiler/traceme.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/pywrap_mlir.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/pywrap_tensorflow.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/pywrap_tensorflow_internal.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/pywrap_tfe.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/builder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/builder_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/function_deserialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/function_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/load.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/load_options.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/load_v1_in_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/loader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/loader_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/main_op.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/main_op_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/method_name_updater.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/model_utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/model_utils/export_output.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/model_utils/export_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/model_utils/mode_keys.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/nested_structure_coder.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/revived_types.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/save.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/save_context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/save_options.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/saved_model.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/signature_constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/signature_def_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/signature_def_utils_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/signature_serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/simple_save.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/tag_constants.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/saved_model/utils_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/summary/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/summary/plugin_asset.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/summary/summary.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/summary/summary_iterator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/summary/writer/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/summary/writer/event_file_writer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/summary/writer/event_file_writer_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/summary/writer/writer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/summary/writer/writer_cache.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tf2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/api/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/api/generator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/api/generator/create_python_api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/api/generator/doc_srcs.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/freeze_graph.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/import_pb_to_tensorboard.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/inspect_checkpoint.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/module_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/optimize_for_inference.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/optimize_for_inference_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/print_selective_registration_header.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/saved_model_aot_compile.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/saved_model_cli.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/saved_model_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/selective_registration_header_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/strip_unused.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tools/strip_unused_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/_tpu_estimator_embedding.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/async_checkpoint.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/bfloat16.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/client/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/client/client.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/client/version.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/datasets.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/device_assignment.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/error_handling.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/feature_column.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/feature_column_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/functional.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/ops/tpu_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/preempted_hook.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/profiler/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/profiler/profiler_analysis_pb2_grpc.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/session_support.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tensor_tracer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tensor_tracer_flags.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tensor_tracer_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tensor_tracer_report.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/topology.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_config.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_embedding.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_embedding_gradient.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_embedding_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_embedding_v2_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_estimator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_feed.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_function.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_name_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_optimizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_sharding.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_strategy_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/tpu_system_metadata.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/training_loop.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/tpu/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/adadelta.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/adagrad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/adagrad_da.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/adam.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/basic_loops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/basic_session_run_hooks.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/checkpoint_management.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/checkpoint_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/checkpoint_state_pb2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/checkpoint_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/coordinator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/device_setter.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/distribute.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/distribution_strategy_context.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/evaluation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/experimental/loss_scale.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/experimental/loss_scale_optimizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/experimental/loss_scaling_gradient_tape.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/experimental/mixed_precision.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/experimental/mixed_precision_global_state.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/ftrl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/gen_training_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/gradient_descent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/input.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/learning_rate_decay.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/momentum.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/monitored_session.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/moving_averages.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/optimizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/proximal_adagrad.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/proximal_gradient_descent.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/py_checkpoint_reader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/quantize_training.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/queue_runner.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/queue_runner_impl.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/rmsprop.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/saver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/saver_test_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/saving/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/saving/checkpoint_options.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/saving/functional_saver.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/saving/saveable_hook.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/saving/saveable_object.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/saving/saveable_object_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/server_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/session_manager.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/session_run_hook.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/slot_creator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/summary_io.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/supervisor.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/sync_replicas_optimizer.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/tracking/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/tracking/base.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/tracking/data_structures.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/tracking/graph_view.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/tracking/layer_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/tracking/python_state.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/tracking/tracking.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/tracking/util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/training.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/training_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/training_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/training/warm_starting_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/types/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/types/core.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/types/distribute.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/types/doc_typealias.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/types/internal.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/user_ops/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/user_ops/user_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/_pywrap_checkpoint_reader.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/_pywrap_kernel_registry.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/_pywrap_nest.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/_pywrap_stat_summarizer.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/_pywrap_tensor_float_32_execution.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/_pywrap_tfprof.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/_pywrap_transform_graph.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/_pywrap_util_port.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/_pywrap_utils.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/_tf_stack.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/all_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/compat.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/compat_internal.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/decorator_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/deprecation.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/deprecation_wrapper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/dispatch.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/example_parser_configuration.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/fast_module_type.so" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/function_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/future_api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/is_in_graph_mode.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/keras_deps.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/keyword_args.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/lazy_loader.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/lock_util.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/memory.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/module_wrapper.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/nest.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/object_identity.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/protobuf/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/protobuf/compare.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/serialization.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/tf_contextlib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/tf_decorator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/tf_export.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/tf_inspect.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/tf_should_use.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/python/util/tf_stack.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/common/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/common/public_api.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/common/test_module1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/common/test_module2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/common/traverse.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/compatibility/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/compatibility/all_renames_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/compatibility/ast_edits.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/compatibility/ipynb.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/compatibility/module_deprecations_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/compatibility/renames_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/compatibility/reorders_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/compatibility/tf_upgrade_v2.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/compatibility/tf_upgrade_v2_main.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/compatibility/tf_upgrade_v2_safety.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/docs/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/docs/doc_controls.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/docs/tf_doctest_lib.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/pip_package/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/pip_package/setup.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/tools/pip_package/simple_console.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/CMakeLists.txt" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/tf2xla/xla_compiled_cpu_function.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/cpu_function_runtime.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/executable_run_options.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_conv2d.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_fft.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_fork_join.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_fp16.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_key_value_sort.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_matmul.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_pow.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_single_threaded_conv2d.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_single_threaded_fft.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_single_threaded_matmul.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/compiler/xla/service/cpu/runtime_topk.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/kernels/eigen_contraction_kernel.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/cord.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/ctstring.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/ctstring_internal.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/default/cord.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/default/dynamic_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/default/env_time.cc" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/default/integral_types.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/dynamic_annotations.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/env_time.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/macros.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/platform.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/tstring.h" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow/xla_aot_runtime_src/tensorflow/core/platform/types.h" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/INSTALLER" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/METADATA" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/RECORD" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/WHEEL" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/top_level.txt" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v1/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v1/estimator/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v1/estimator/export/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v1/estimator/inputs/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v1/estimator/tpu/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v1/estimator/tpu/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v1/v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v2/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v2/estimator/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v2/estimator/export/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v2/estimator/inputs/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/_api/v2/v2.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/__init__.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v1/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v1/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v1/estimator/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v1/estimator/export/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v1/estimator/inputs/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v1/estimator/tpu/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v1/estimator/tpu/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v1/v1.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v2/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v2/estimator/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v2/estimator/experimental/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v2/estimator/export/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v2/estimator/inputs/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/api/_v2/v2.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/__init__.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/baseline.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/boosted_trees.py" afterDir="false" />
@@ -11577,11 +37,6 @@
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/head.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/kmeans.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear_optimizer/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear_optimizer/python/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear_optimizer/python/utils/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear_optimizer/python/utils/sdca_ops.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear_optimizer/python/utils/sharded_mutable_dense_hashtable.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear_testing_utils.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/metric_keys.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/optimizers.py" afterDir="false" />
@@ -11589,13 +44,6 @@
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/prediction_keys.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/rnn.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/saved_model_estimator.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/__init__.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/ar_model.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/estimators.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/feature_keys.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/head.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/math_utils.py" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/model.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/model_utils.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/saved_model_utils.py" afterDir="false" />
       <change afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/state_management.py" afterDir="false" />
@@ -11861,8 +309,8 @@
       <file pinned="false" current-in-tab="true">
         <entry file="file://$PROJECT_DIR$/image_classifier_model_builder.py">
           <provider selected="true" editor-type-id="text-editor">
-            <state relative-caret-position="255">
-              <caret line="17" lean-forward="true" selection-start-line="17" selection-end-line="17" />
+            <state relative-caret-position="101">
+              <caret line="17" selection-start-line="17" selection-end-line="17" />
               <folding>
                 <element signature="e#0#16#0" expanded="true" />
               </folding>
@@ -12030,7 +478,14 @@
       <option name="project" value="LOCAL" />
       <updated>1621698013242</updated>
     </task>
-    <option name="localTasksCounter" value="2" />
+    <task id="LOCAL-00002" summary="Adding the image_classifier_model_builder, and the model. Also adding example pictures">
+      <created>1621852070551</created>
+      <option name="number" value="00002" />
+      <option name="presentableId" value="LOCAL-00002" />
+      <option name="project" value="LOCAL" />
+      <updated>1621852070551</updated>
+    </task>
+    <option name="localTasksCounter" value="3" />
     <servers />
   </component>
   <component name="ToolWindowManager">
@@ -12043,12 +498,12 @@
       <window_info anchor="bottom" id="Docker" show_stripe_button="false" />
       <window_info anchor="bottom" id="Database Changes" show_stripe_button="false" />
       <window_info anchor="bottom" id="Version Control" show_stripe_button="false" />
-      <window_info anchor="bottom" id="Python Console" />
-      <window_info anchor="bottom" id="Terminal" weight="0.32972136" />
+      <window_info anchor="bottom" id="Python Console" weight="0.32972136" />
+      <window_info anchor="bottom" id="Terminal" weight="0.6687307" />
       <window_info anchor="bottom" id="Event Log" side_tool="true" />
       <window_info anchor="bottom" id="Message" order="0" />
       <window_info anchor="bottom" id="Find" order="1" />
-      <window_info anchor="bottom" id="Run" order="2" visible="true" weight="0.32972136" />
+      <window_info anchor="bottom" id="Run" order="2" weight="0.32972136" />
       <window_info anchor="bottom" id="Debug" order="3" weight="0.4" />
       <window_info anchor="bottom" id="Cvs" order="4" weight="0.25" />
       <window_info anchor="bottom" id="Inspection" order="5" weight="0.4" />
@@ -12083,8 +538,8 @@
     </entry>
     <entry file="file://$PROJECT_DIR$/image_classifier_model_builder.py">
       <provider selected="true" editor-type-id="text-editor">
-        <state relative-caret-position="255">
-          <caret line="17" lean-forward="true" selection-start-line="17" selection-end-line="17" />
+        <state relative-caret-position="101">
+          <caret line="17" selection-start-line="17" selection-end-line="17" />
           <folding>
             <element signature="e#0#16#0" expanded="true" />
           </folding>
diff --git a/frog.jpg b/frog.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..2b26eb9f4d913132726aaa7d62fe399521a7a2fa
GIT binary patch
literal 1465
zcmex=<NpH&0WUXCHwH#V1_loX4+e(+4;gw~D>Bm<7<_#hv=|r|I2c$Ng&3F_7#J8C
z7#SECr5ISjYz77|Mrk-Zh*1NohKYfpJ(Gb2swRp70z9C62+h2J5vrPzfnfm?T(!ai
zW&|6g@&8)}i-3&$lKkR~`~n4IJp%>{Cr@7%km(Ez5Vk5v9fQCX*`AyI44YV9HgL{(
z^TVm`{~-oJ4h97V1!hJi1|~s9W<kdPM;PQ87#LX@!2qm{fq|Kcg^`tkjh%y&i~Ii(
zhQk63jLghTEX*uyEbMG79BjpmObpC|EJBK`hQdk$Y>t6OB8i2{?2QT&okSM~8E;fc
zG7vjhbkTW|NfXDzU<cJ?anlczix&mBY-;|f7Gjn%rQ}e;rIy7K1&`ES%~LmrmM)ok
zxb@Rzx3Y%E?qO+LIJqRHG&Hp=EUi2|y}~0RqtY`nv&t(ftJ>N-I=d!Jn?7UatOZM#
zEnl&6)rPIxw(r=v>%ftt$Bv&kdFsNItJkjIxOwZrlc&#~zj*oT^OvvRzW@073*<{i
z24=7yK?K;JAWyKcB77;xz{JGJ%*@Qh%*w>d#KO|f$iO7XEFh%FqG0GK%xa)y6e!}r
zW~`j(Bw83`(iosJaiQ}@G4>?W;13s76ArpOEcz(klx$X@<{C0debFTjPA&<sKg=yG
zt=!x_JiS80K%PlSO#}I-xTLfU<fn$_mR5+rCQq3P@!H}gP|t1JyanpNLx*8LybSZ=
zqsLHBeuDe*HiI(*D>DlV3o|PV3kxeND;qlx2Rl1EJ3lu!CyyY%kdPq1prEjrjJU9f
zl&GMfgrbC$tgO7eys)^Eijtg)jGVmO|Jw}CjI6Az>}>3O92|UdB7!1vB!mCA7<ib$
z!6(ST!1(bu(=P`Z*7`M-i{&=2_*^aH{jToW_9gZIZ!vH%GBPl-GCfM}U3SaWQ7N`}
z)ADJzv$pz_Pj9++*5jbHmtt3pf%Qxwm$gY#Q@vd8+*!SQX5QrDCH7#w%z}*ejH^>L
zduC4X1`9BO1m;CN{`6?(37wt4igji;H>nvs1<Nrqu-P-cSvgVCaKS~dFK?!(cYe(@
zDqS_}Yk+0YEZr;-k7J!Z_0d1=Vy2$Vy!d<9)wuLcwL#A#+%*kXDl)YPKReg2^|jJ)
zR&vbBzjs=)+FH)I3LjK%Ui|+EgRuZ8=Q4o+GpNj9;${?NP&9N5Oe}1ixNzga4;KX<
zepE1U2uLVsn6O~OfeR0y4q~)d<kFT*n9+Fr$LFgJlUh?Yc=2zW%6Dx^jeGa;W=$Rb
zv`6p#SJnKA7UY|6|Ehuc8|S&7uhy_$at!vjocwC%8RcEpEvKy3G0nfzzU{Tbhm`Nh
z0{c;dUSN;MACLnD85u!NWMpDx1qC=GCnJNPqM?9eV4_0dhlz~~H(osW@FRu;LBT5^
z?-7xc*t<#}>|{{*3LO8rpXI`aANx-IXSgnAycaCN$iQeXP;f5umH&!`#ur|xyfkJn
z=sOw3Rku2FZ$LsMW3y#@@L8SgEt*}jJ7>Oo5&Mr_@lcxi+$L>i1BD8~z{RC^ScErp
zREMnK$Ztq!F!-hyyieOPaAkOck=n@!Nsak|=@t{D-Uv=ub4i28K<G*1x33bbq%@|*
QB_CLLIxJD8CjS3T0FNc^fdBvi

literal 0
HcmV?d00001

diff --git a/image_classifier.py b/image_classifier.py
index 888def36..e8ca72cd 100644
--- a/image_classifier.py
+++ b/image_classifier.py
@@ -7,7 +7,7 @@ class_names = ['Plane', 'Car', 'Bird', 'Cat', 'Deer', 'Dog', 'Frog', 'Horse', 'S
 
 model = models.load_model('image_classifier.model')
 
-img = cv.imread('dog.jpg')
+img = cv.imread('frog.jpg')
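+# cv.imread returns pixels in BGR order; the conversion below flips them to
+# RGB, the channel order the model is assumed to expect.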
 img = cv.cvtColor(img, cv.COLOR_BGR2RGB)
 
 plt.imshow(img, cmap=plt.cm.binary)
diff --git a/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/LICENSE.rst b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/LICENSE.rst
new file mode 100644
index 00000000..c37cae49
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2007 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.  Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+3.  Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/METADATA b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/METADATA
new file mode 100644
index 00000000..f991d5aa
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/METADATA
@@ -0,0 +1,128 @@
+Metadata-Version: 2.1
+Name: Werkzeug
+Version: 2.0.1
+Summary: The comprehensive WSGI web application library.
+Home-page: https://palletsprojects.com/p/werkzeug/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://werkzeug.palletsprojects.com/
+Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/werkzeug/
+Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/
+Project-URL: Twitter, https://twitter.com/PalletsTeam
+Project-URL: Chat, https://discord.gg/pallets
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+Requires-Dist: dataclasses ; python_version < "3.7"
+Provides-Extra: watchdog
+Requires-Dist: watchdog ; extra == 'watchdog'
+
+Werkzeug
+========
+
+*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff")
+
+Werkzeug is a comprehensive `WSGI`_ web application library. It began as
+a simple collection of various utilities for WSGI applications and has
+become one of the most advanced WSGI utility libraries.
+
+It includes:
+
+-   An interactive debugger that allows inspecting stack traces and
+    source code in the browser with an interactive interpreter for any
+    frame in the stack.
+-   A full-featured request object with objects to interact with
+    headers, query args, form data, files, and cookies.
+-   A response object that can wrap other WSGI applications and handle
+    streaming data.
+-   A routing system for matching URLs to endpoints and generating URLs
+    for endpoints, with an extensible system for capturing variables
+    from URLs.
+-   HTTP utilities to handle entity tags, cache control, dates, user
+    agents, cookies, files, and more.
+-   A threaded WSGI server for use while developing applications
+    locally.
+-   A test client for simulating HTTP requests during testing without
+    needing to run a server.
+
+Werkzeug doesn't enforce any dependencies. It is up to the developer to
+choose a template engine, database adapter, and even how to handle
+requests. It can be used to build all sorts of end-user applications
+such as blogs, wikis, or bulletin boards.
+
+`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while
+providing more structure and patterns for defining powerful
+applications.
+
+.. _WSGI: https://wsgi.readthedocs.io/en/latest/
+.. _Flask: https://www.palletsprojects.com/p/flask/
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    pip install -U Werkzeug
+
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+A Simple Example
+----------------
+
+.. code-block:: python
+
+    from werkzeug.wrappers import Request, Response
+
+    @Request.application
+    def application(request):
+        return Response('Hello, World!')
+
+    if __name__ == '__main__':
+        from werkzeug.serving import run_simple
+        run_simple('localhost', 4000, application)
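+
+The routing system mentioned above can also be used on its own. A
+minimal sketch (the endpoint name ``user_profile`` is illustrative, not
+part of the library):
+
+.. code-block:: python
+
+    from werkzeug.routing import Map, Rule
+
+    url_map = Map([
+        Rule('/', endpoint='index'),
+        Rule('/user/<username>', endpoint='user_profile'),
+    ])
+
+    # Bind the map to a host, then match incoming paths to endpoints
+    # and build URLs back from endpoint names.
+    urls = url_map.bind('example.com')
+    urls.match('/user/alice')    # -> ('user_profile', {'username': 'alice'})
+    urls.build('user_profile', {'username': 'alice'})    # -> '/user/alice'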
+
+
+Donate
+------
+
+The Pallets organization develops and supports Werkzeug and other
+popular packages. In order to grow the community of contributors and
+users, and allow the maintainers to devote more time to the projects,
+`please donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+-   Documentation: https://werkzeug.palletsprojects.com/
+-   Changes: https://werkzeug.palletsprojects.com/changes/
+-   PyPI Releases: https://pypi.org/project/Werkzeug/
+-   Source Code: https://github.com/pallets/werkzeug/
+-   Issue Tracker: https://github.com/pallets/werkzeug/issues/
+-   Website: https://palletsprojects.com/p/werkzeug/
+-   Twitter: https://twitter.com/PalletsTeam
+-   Chat: https://discord.gg/pallets
+
+
diff --git a/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/RECORD b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/RECORD
new file mode 100644
index 00000000..78bf2d58
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/RECORD
@@ -0,0 +1,111 @@
+Werkzeug-2.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Werkzeug-2.0.1.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
+Werkzeug-2.0.1.dist-info/METADATA,sha256=8-W33EMnGqnCCi-d8Dv63IQQuyELRIsXhwOJNXbNgL0,4421
+Werkzeug-2.0.1.dist-info/RECORD,,
+Werkzeug-2.0.1.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
+Werkzeug-2.0.1.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9
+werkzeug/__init__.py,sha256=_CCsfdeqNllFNRJx8cvqYrwBsQQQXJaMmnk2sAZnDng,188
+werkzeug/__pycache__/__init__.cpython-37.pyc,,
+werkzeug/__pycache__/_internal.cpython-37.pyc,,
+werkzeug/__pycache__/_reloader.cpython-37.pyc,,
+werkzeug/__pycache__/datastructures.cpython-37.pyc,,
+werkzeug/__pycache__/exceptions.cpython-37.pyc,,
+werkzeug/__pycache__/filesystem.cpython-37.pyc,,
+werkzeug/__pycache__/formparser.cpython-37.pyc,,
+werkzeug/__pycache__/http.cpython-37.pyc,,
+werkzeug/__pycache__/local.cpython-37.pyc,,
+werkzeug/__pycache__/routing.cpython-37.pyc,,
+werkzeug/__pycache__/security.cpython-37.pyc,,
+werkzeug/__pycache__/serving.cpython-37.pyc,,
+werkzeug/__pycache__/test.cpython-37.pyc,,
+werkzeug/__pycache__/testapp.cpython-37.pyc,,
+werkzeug/__pycache__/urls.cpython-37.pyc,,
+werkzeug/__pycache__/user_agent.cpython-37.pyc,,
+werkzeug/__pycache__/useragents.cpython-37.pyc,,
+werkzeug/__pycache__/utils.cpython-37.pyc,,
+werkzeug/__pycache__/wsgi.cpython-37.pyc,,
+werkzeug/_internal.py,sha256=_QKkvdaG4pDFwK68c0EpPzYJGe9Y7toRAT1cBbC-CxU,18572
+werkzeug/_reloader.py,sha256=B1hEfgsUOz2IginBQM5Zak_eaIF7gr3GS5-0x2OHvAE,13950
+werkzeug/datastructures.py,sha256=KahVPSLOapbNbKh1ppr9K8_DgWJv1EGgA9DhTEGMHcg,97886
+werkzeug/datastructures.pyi,sha256=5DTPF8P8Zvi458eK27Qcj7eNUlLM_AC0jBNkj6uQpds,33774
+werkzeug/debug/__init__.py,sha256=CUFrPEYAaotHRkmjOieqd3EasXDii2JVC1HdmEzMwqM,17924
+werkzeug/debug/__pycache__/__init__.cpython-37.pyc,,
+werkzeug/debug/__pycache__/console.cpython-37.pyc,,
+werkzeug/debug/__pycache__/repr.cpython-37.pyc,,
+werkzeug/debug/__pycache__/tbtools.cpython-37.pyc,,
+werkzeug/debug/console.py,sha256=E1nBMEvFkX673ShQjPtVY-byYatfX9MN-dBMjRI8a8E,5897
+werkzeug/debug/repr.py,sha256=QCSHENKsChEZDCIApkVi_UNjhJ77v8BMXK1OfxO189M,9483
+werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673
+werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222
+werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507
+werkzeug/debug/shared/debugger.js,sha256=dYbUmFmb3YZb5YpWpYPOQArdrN7NPeY0ODawL7ihzDI,10524
+werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191
+werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200
+werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818
+werkzeug/debug/shared/style.css,sha256=vyp1RnB227Fuw8LIyM5C-bBCBQN5hvZSCApY2oeJ9ik,6705
+werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220
+werkzeug/debug/tbtools.py,sha256=TfReusPbM3yjm3xvOFkH45V7-5JnNqB9x1EQPnVC6Xo,19189
+werkzeug/exceptions.py,sha256=CUwx0pBiNbk4f9cON17ekgKnmLi6HIVFjUmYZc2x0wM,28681
+werkzeug/filesystem.py,sha256=JS2Dv2QF98WILxY4_thHl-WMcUcwluF_4igkDPaP1l4,1956
+werkzeug/formparser.py,sha256=GIKfzuQ_khuBXnf3N7_LzOEruYwNc3m4bI02BgtT5jg,17385
+werkzeug/http.py,sha256=oUCXFFMnkOQ-cHbUY_aiqitshcrSzNDq3fEMf1VI_yk,45141
+werkzeug/local.py,sha256=WsR6H-2XOtPigpimjORbLsS3h9WI0lCdZjGI2LHDDxA,22733
+werkzeug/middleware/__init__.py,sha256=qfqgdT5npwG9ses3-FXQJf3aB95JYP1zchetH_T3PUw,500
+werkzeug/middleware/__pycache__/__init__.cpython-37.pyc,,
+werkzeug/middleware/__pycache__/dispatcher.cpython-37.pyc,,
+werkzeug/middleware/__pycache__/http_proxy.cpython-37.pyc,,
+werkzeug/middleware/__pycache__/lint.cpython-37.pyc,,
+werkzeug/middleware/__pycache__/profiler.cpython-37.pyc,,
+werkzeug/middleware/__pycache__/proxy_fix.cpython-37.pyc,,
+werkzeug/middleware/__pycache__/shared_data.cpython-37.pyc,,
+werkzeug/middleware/dispatcher.py,sha256=Fh_w-KyWnTSYF-Lfv5dimQ7THSS7afPAZMmvc4zF1gg,2580
+werkzeug/middleware/http_proxy.py,sha256=HE8VyhS7CR-E1O6_9b68huv8FLgGGR1DLYqkS3Xcp3Q,7558
+werkzeug/middleware/lint.py,sha256=yMzMdm4xI2_N-Wv2j1yoaVI3ltHOYS6yZyA-wUv1sKw,13962
+werkzeug/middleware/profiler.py,sha256=G2JieUMv4QPamtCY6ibIK7P-piPRdPybav7bm2MSFvs,4898
+werkzeug/middleware/proxy_fix.py,sha256=uRgQ3dEvFV8JxUqajHYYYOPEeA_BFqaa51Yp8VW0uzA,6849
+werkzeug/middleware/shared_data.py,sha256=eOCGr-i6BCexDfL7xdPRWMwPJPgp0NE2B416Gl67Q78,10959
+werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+werkzeug/routing.py,sha256=FDRtvCfaZSmXnQ0cUYxowb3P0y0dxlUlMSUmerY5sb0,84147
+werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+werkzeug/sansio/__pycache__/__init__.cpython-37.pyc,,
+werkzeug/sansio/__pycache__/multipart.cpython-37.pyc,,
+werkzeug/sansio/__pycache__/request.cpython-37.pyc,,
+werkzeug/sansio/__pycache__/response.cpython-37.pyc,,
+werkzeug/sansio/__pycache__/utils.cpython-37.pyc,,
+werkzeug/sansio/multipart.py,sha256=bJMCNC2f5xyAaylahNViJ0JqmV4ThLRbDVGVzKwcqrQ,8751
+werkzeug/sansio/request.py,sha256=aA9rABkWiG4MhYMByanst2NXkEclsq8SIxhb0LQf0e0,20228
+werkzeug/sansio/response.py,sha256=HSG6t-tyPZd3awzWqr7qL9IV4HYAvDgON1c0YPU2RXw,24117
+werkzeug/sansio/utils.py,sha256=V5v-UUnX8pm4RehP9Tt_NiUSOJGJGUvKjlW0eOIQldM,4164
+werkzeug/security.py,sha256=gPDRuCjkjWrcqj99tBMq8_nHFZLFQjgoW5Ga5XIw9jo,8158
+werkzeug/serving.py,sha256=_RG2dCclOQcdjJ2NE8tzCRybGePlwcs8kTypiWRP2gY,38030
+werkzeug/test.py,sha256=EJXJy-b_JriHrlfs5VNCkwbki8Kn_xUDkOYOCx_6Q7Q,48096
+werkzeug/testapp.py,sha256=f48prWSGJhbSrvYb8e1fnAah4BkrLb0enHSdChgsjBY,9471
+werkzeug/urls.py,sha256=3o_aUcr5Ou13XihSU6VvX6RHMhoWkKpXuCCia9SSAb8,41021
+werkzeug/user_agent.py,sha256=WclZhpvgLurMF45hsioSbS75H1Zb4iMQGKN3_yZ2oKo,1420
+werkzeug/useragents.py,sha256=G8tmv_6vxJaPrLQH3eODNgIYe0_V6KETROQlJI-WxDE,7264
+werkzeug/utils.py,sha256=WrU-LbwemyGd8zBHBgQyLaIxing4QLEChiP0qnzr2sc,36771
+werkzeug/wrappers/__init__.py,sha256=-s75nPbyXHzU_rwmLPDhoMuGbEUk0jZT_n0ZQAOFGf8,654
+werkzeug/wrappers/__pycache__/__init__.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/accept.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/auth.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/base_request.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/base_response.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/common_descriptors.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/cors.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/etag.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/json.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/request.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/response.cpython-37.pyc,,
+werkzeug/wrappers/__pycache__/user_agent.cpython-37.pyc,,
+werkzeug/wrappers/accept.py,sha256=_oZtAQkahvsrPRkNj2fieg7_St9P0NFC3SgZbJKS6xU,429
+werkzeug/wrappers/auth.py,sha256=rZPCzGxHk9R55PRkmS90kRywUVjjuMWzCGtH68qCq8U,856
+werkzeug/wrappers/base_request.py,sha256=saz9RyNQkvI_XLPYVm29KijNHmD1YzgxDqa0qHTbgss,1174
+werkzeug/wrappers/base_response.py,sha256=q_-TaYywT5G4zA-DWDRDJhJSat2_4O7gOPob6ye4_9A,1186
+werkzeug/wrappers/common_descriptors.py,sha256=v_kWLH3mvCiSRVJ1FNw7nO3w2UJfzY57UKKB5J4zCvE,898
+werkzeug/wrappers/cors.py,sha256=c5UndlZsZvYkbPrp6Gj5iSXxw_VOJDJHskO6-jRmNyQ,846
+werkzeug/wrappers/etag.py,sha256=XHWQQs7Mdd1oWezgBIsl-bYe8ydKkRZVil2Qd01D0Mo,846
+werkzeug/wrappers/json.py,sha256=HM1btPseGeXca0vnwQN_MvZl6h-qNsFY5YBKXKXFwus,410
+werkzeug/wrappers/request.py,sha256=0zAkCUwJbUBzioGy2UKxE6XpuXPAZbEhhML4WErzeBo,24818
+werkzeug/wrappers/response.py,sha256=95hXIysZTeNC0bqhvGB2fLBRKxedR_cgI5szZZWfyzw,35177
+werkzeug/wrappers/user_agent.py,sha256=Wl1-A0-1r8o7cHIZQTB55O4Ged6LpCKENaQDlOY5pXA,435
+werkzeug/wsgi.py,sha256=7psV3SHLtCzk1KSuGmIK5uP2QTDXyfCCDclyqCmIUO4,33715
diff --git a/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/WHEEL b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/WHEEL
new file mode 100644
index 00000000..385faab0
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/top_level.txt
new file mode 100644
index 00000000..6fe8da84
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/Werkzeug-2.0.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+werkzeug
diff --git a/venv/lib/python3.7/site-packages/cached_property.py b/venv/lib/python3.7/site-packages/cached_property.py
new file mode 100644
index 00000000..3135871b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/cached_property.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+
+__author__ = "Daniel Greenfeld"
+__email__ = "pydanny@gmail.com"
+__version__ = "1.5.2"
+__license__ = "BSD"
+
+from functools import wraps
+from time import time
+import threading
+
+try:
+    import asyncio
+except (ImportError, SyntaxError):
+    asyncio = None
+
+
+class cached_property(object):
+    """
+    A property that is only computed once per instance and then replaces itself
+    with an ordinary attribute. Deleting the attribute resets the property.
+    Source: https://github.com/bottlepy/bottle/commit/fa7733e075da0d790d809aa3d2f53071897e6f76
+    """  # noqa
+
+    def __init__(self, func):
+        self.__doc__ = getattr(func, "__doc__")
+        self.func = func
+
+    def __get__(self, obj, cls):
+        if obj is None:
+            return self
+
+        if asyncio and asyncio.iscoroutinefunction(self.func):
+            return self._wrap_in_coroutine(obj)
+
+        value = obj.__dict__[self.func.__name__] = self.func(obj)
+        return value
+
+    def _wrap_in_coroutine(self, obj):
+        @wraps(obj)
+        @asyncio.coroutine
+        def wrapper():
+            future = asyncio.ensure_future(self.func(obj))
+            obj.__dict__[self.func.__name__] = future
+            return future
+
+        return wrapper()
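+
+
+# A minimal usage sketch of ``cached_property`` (the class and helper names
+# here are hypothetical; kept as a comment so importing this module stays
+# side-effect free):
+#
+#     class Report(object):
+#         @cached_property
+#         def data(self):
+#             return expensive_query()   # hypothetical helper; runs once
+#
+#     r = Report()
+#     r.data       # computed on first access, then stored on the instance
+#     r.data       # served from the instance __dict__, no recomputation
+#     del r.data   # deleting the attribute resets the property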
+
+
+class threaded_cached_property(object):
+    """
+    A cached_property version for use in environments where multiple threads
+    might concurrently try to access the property.
+    """
+
+    def __init__(self, func):
+        self.__doc__ = getattr(func, "__doc__")
+        self.func = func
+        self.lock = threading.RLock()
+
+    def __get__(self, obj, cls):
+        if obj is None:
+            return self
+
+        obj_dict = obj.__dict__
+        name = self.func.__name__
+        with self.lock:
+            try:
+                # check if the value was computed before the lock was acquired
+                return obj_dict[name]
+
+            except KeyError:
+                # if not, compute and store the value; the lock is
+                # released when the ``with`` block exits
+                return obj_dict.setdefault(name, self.func(obj))
+
+
+class cached_property_with_ttl(object):
+    """
+    A property that is only computed once per instance and then replaces itself
+    with an ordinary attribute. Setting the ttl to a number of seconds
+    expresses how long the cached value lasts before it expires and is
+    recomputed on the next access.
+    """
+
+    def __init__(self, ttl=None):
+        if callable(ttl):
+            func = ttl
+            ttl = None
+        else:
+            func = None
+        self.ttl = ttl
+        self._prepare_func(func)
+
+    def __call__(self, func):
+        self._prepare_func(func)
+        return self
+
+    def __get__(self, obj, cls):
+        if obj is None:
+            return self
+
+        now = time()
+        obj_dict = obj.__dict__
+        name = self.__name__
+        try:
+            value, last_updated = obj_dict[name]
+        except KeyError:
+            pass
+        else:
+            ttl_expired = self.ttl and self.ttl < now - last_updated
+            if not ttl_expired:
+                return value
+
+        value = self.func(obj)
+        obj_dict[name] = (value, now)
+        return value
+
+    def __delete__(self, obj):
+        obj.__dict__.pop(self.__name__, None)
+
+    def __set__(self, obj, value):
+        obj.__dict__[self.__name__] = (value, time())
+
+    def _prepare_func(self, func):
+        self.func = func
+        if func:
+            self.__doc__ = func.__doc__
+            self.__name__ = func.__name__
+            self.__module__ = func.__module__
+
+
+# Aliases to make cached_property_with_ttl easier to use
+cached_property_ttl = cached_property_with_ttl
+timed_cached_property = cached_property_with_ttl
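+
+
+# Sketch of ``cached_property_with_ttl`` (hypothetical names; per __init__
+# above, the decorator accepts either a bare function or a ttl in seconds):
+#
+#     class Feed(object):
+#         @cached_property_with_ttl(ttl=300)   # value expires after 5 minutes
+#         def entries(self):
+#             return fetch_entries()   # hypothetical helper
+#
+#     f = Feed()
+#     f.entries   # computed and cached together with a timestamp
+#     f.entries   # recomputed only once the 300-second ttl has elapsed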
+
+
+class threaded_cached_property_with_ttl(cached_property_with_ttl):
+    """
+    A cached_property version for use in environments where multiple threads
+    might concurrently try to access the property.
+    """
+
+    def __init__(self, ttl=None):
+        super(threaded_cached_property_with_ttl, self).__init__(ttl)
+        self.lock = threading.RLock()
+
+    def __get__(self, obj, cls):
+        with self.lock:
+            return super(threaded_cached_property_with_ttl, self).__get__(obj, cls)
+
+
+# Alias to make threaded_cached_property_with_ttl easier to use
+threaded_cached_property_ttl = threaded_cached_property_with_ttl
+timed_threaded_cached_property = threaded_cached_property_with_ttl
diff --git a/venv/lib/python3.7/site-packages/cycler.py b/venv/lib/python3.7/site-packages/cycler.py
new file mode 100644
index 00000000..3c3eb2d5
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/cycler.py
@@ -0,0 +1,558 @@
+"""
+Cycler
+======
+
+Cycling through combinations of values, producing dictionaries.
+
+You can add cyclers::
+
+    from cycler import cycler
+    cc = (cycler(color=list('rgb')) +
+          cycler(linestyle=['-', '--', '-.']))
+    for d in cc:
+        print(d)
+
+Results in::
+
+    {'color': 'r', 'linestyle': '-'}
+    {'color': 'g', 'linestyle': '--'}
+    {'color': 'b', 'linestyle': '-.'}
+
+
+You can multiply cyclers::
+
+    from cycler import cycler
+    cc = (cycler(color=list('rgb')) *
+          cycler(linestyle=['-', '--', '-.']))
+    for d in cc:
+        print(d)
+
+Results in::
+
+    {'color': 'r', 'linestyle': '-'}
+    {'color': 'r', 'linestyle': '--'}
+    {'color': 'r', 'linestyle': '-.'}
+    {'color': 'g', 'linestyle': '-'}
+    {'color': 'g', 'linestyle': '--'}
+    {'color': 'g', 'linestyle': '-.'}
+    {'color': 'b', 'linestyle': '-'}
+    {'color': 'b', 'linestyle': '--'}
+    {'color': 'b', 'linestyle': '-.'}
+"""
+
+from __future__ import (absolute_import, division, print_function,
+                        unicode_literals)
+
+import six
+from itertools import product, cycle
+from six.moves import zip, reduce
+from operator import mul, add
+import copy
+
+__version__ = '0.10.0'
+
+
+def _process_keys(left, right):
+    """
+    Helper function to compose cycler keys
+
+    Parameters
+    ----------
+    left, right : iterable of dictionaries or None
+        The cyclers to be composed
+
+    Returns
+    -------
+    keys : set
+        The keys in the composition of the two cyclers
+    """
+    l_peek = next(iter(left)) if left is not None else {}
+    r_peek = next(iter(right)) if right is not None else {}
+    l_key = set(l_peek.keys())
+    r_key = set(r_peek.keys())
+    if l_key & r_key:
+        raise ValueError("Can not compose overlapping cycles")
+    return l_key | r_key
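+
+
+# Key-composition sketch for ``_process_keys`` (hypothetical values):
+#
+#     _process_keys([{'color': 'r'}], [{'lw': 1}])        # -> {'color', 'lw'}
+#     _process_keys([{'color': 'r'}], [{'color': 'g'}])   # raises ValueError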
+
+
+class Cycler(object):
+    """
+    Composable cycles
+
+    This class has composition methods:
+
+    ``+``
+      for 'inner' products (zip)
+
+    ``+=``
+      in-place ``+``
+
+    ``*``
+      for outer products (itertools.product) and integer multiplication
+
+    ``*=``
+      in-place ``*``
+
+    and supports basic slicing via ``[]``
+
+    Parameters
+    ----------
+    left : Cycler or None
+        The 'left' cycler
+
+    right : Cycler or None
+        The 'right' cycler
+
+    op : func or None
+        Function which composes the 'left' and 'right' cyclers.
+
+    """
+    def __call__(self):
+        return cycle(self)
+
+    def __init__(self, left, right=None, op=None):
+        """Semi-private init
+
+        Do not use this directly; use the `cycler` function instead.
+        """
+        if isinstance(left, Cycler):
+            self._left = Cycler(left._left, left._right, left._op)
+        elif left is not None:
+            # Need to copy each dictionary, or else shared mutable state
+            # could lead to strange errors
+            self._left = [copy.copy(v) for v in left]
+        else:
+            self._left = None
+
+        if isinstance(right, Cycler):
+            self._right = Cycler(right._left, right._right, right._op)
+        elif right is not None:
+            # Need to copy each dictionary, or else shared mutable state
+            # could lead to strange errors
+            self._right = [copy.copy(v) for v in right]
+        else:
+            self._right = None
+
+        self._keys = _process_keys(self._left, self._right)
+        self._op = op
+
+    @property
+    def keys(self):
+        """
+        The keys this Cycler knows about
+        """
+        return set(self._keys)
+
+    def change_key(self, old, new):
+        """
+        Change a key in this cycler to a new name.
+        Modification is performed in-place.
+
+        Does nothing if the old key is the same as the new key.
+        Raises a ValueError if the new key is already a key.
+        Raises a KeyError if the old key isn't a key.
+
+        """
+        if old == new:
+            return
+        if new in self._keys:
+            raise ValueError("Can't replace %s with %s, %s is already a key" %
+                             (old, new, new))
+        if old not in self._keys:
+            raise KeyError("Can't replace %s with %s, %s is not a key" %
+                           (old, new, old))
+
+        self._keys.remove(old)
+        self._keys.add(new)
+
+        if self._right is not None and old in self._right.keys:
+            self._right.change_key(old, new)
+
+        # self._left should always be non-None
+        # if self._keys is non-empty.
+        elif isinstance(self._left, Cycler):
+            self._left.change_key(old, new)
+        else:
+            # It should be completely safe at this point to
+            # assume that the old key can be found in each
+            # iteration.
+            self._left = [{new: entry[old]} for entry in self._left]
+
+    def _compose(self):
+        """
+        Compose the 'left' and 'right' components of this cycle
+        with the proper operation (zip or product as of now)
+        """
+        for a, b in self._op(self._left, self._right):
+            out = dict()
+            out.update(a)
+            out.update(b)
+            yield out
+
+    @classmethod
+    def _from_iter(cls, label, itr):
+        """
+        Class method to create 'base' Cycler objects
+        that do not have a 'right' or 'op' and for which
+        the 'left' object is not another Cycler.
+
+        Parameters
+        ----------
+        label : str
+            The property key.
+
+        itr : iterable
+            Finite length iterable of the property values.
+
+        Returns
+        -------
+        cycler : Cycler
+            New 'base' `Cycler`
+        """
+        ret = cls(None)
+        ret._left = list({label: v} for v in itr)
+        ret._keys = set([label])
+        return ret
+
+    def __getitem__(self, key):
+        # TODO : maybe add numpy style fancy slicing
+        if isinstance(key, slice):
+            trans = self.by_key()
+            return reduce(add, (_cycler(k, v[key])
+                                for k, v in six.iteritems(trans)))
+        else:
+            raise ValueError("Can only use slices with Cycler.__getitem__")
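+
+    # Slicing sketch for ``__getitem__`` above (hypothetical values):
+    #
+    #     cc = cycler(color=['r', 'g', 'b'])
+    #     cc[:2]   # -> cycler('color', ['r', 'g'])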
+
+    def __iter__(self):
+        if self._right is None:
+            return iter(dict(l) for l in self._left)
+
+        return self._compose()
+
+    def __add__(self, other):
+        """
+        Pair-wise combine two equal length cycles (zip)
+
+        Parameters
+        ----------
+        other : Cycler
+           The second Cycler
+        """
+        if len(self) != len(other):
+            raise ValueError("Can only add equal length cycles, "
+                             "not {0} and {1}".format(len(self), len(other)))
+        return Cycler(self, other, zip)
+
+    def __mul__(self, other):
+        """
+        Outer product of two cycles (`itertools.product`) or integer
+        multiplication.
+
+        Parameters
+        ----------
+        other : Cycler or int
+           The second Cycler or integer
+        """
+        if isinstance(other, Cycler):
+            return Cycler(self, other, product)
+        elif isinstance(other, int):
+            trans = self.by_key()
+            return reduce(add, (_cycler(k, v*other)
+                                for k, v in six.iteritems(trans)))
+        else:
+            return NotImplemented
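+
+    # Integer-multiplication sketch for ``__mul__`` above (hypothetical
+    # values):
+    #
+    #     cycler(color=['r', 'g']) * 2
+    #     # -> cycler('color', ['r', 'g', 'r', 'g'])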
+
+    def __rmul__(self, other):
+        return self * other
+
+    def __len__(self):
+        op_dict = {zip: min, product: mul}
+        if self._right is None:
+            return len(self._left)
+        l_len = len(self._left)
+        r_len = len(self._right)
+        return op_dict[self._op](l_len, r_len)
+
+    def __iadd__(self, other):
+        """
+        In-place pair-wise combine two equal length cycles (zip)
+
+        Parameters
+        ----------
+        other : Cycler
+           The second Cycler
+        """
+        if not isinstance(other, Cycler):
+            raise TypeError("Cannot += with a non-Cycler object")
+        # True shallow copy of self is fine since this is in-place
+        old_self = copy.copy(self)
+        self._keys = _process_keys(old_self, other)
+        self._left = old_self
+        self._op = zip
+        self._right = Cycler(other._left, other._right, other._op)
+        return self
+
+    def __imul__(self, other):
+        """
+        In-place outer product of two cycles (`itertools.product`)
+
+        Parameters
+        ----------
+        other : Cycler
+           The second Cycler
+        """
+        if not isinstance(other, Cycler):
+            raise TypeError("Cannot *= with a non-Cycler object")
+        # True shallow copy of self is fine since this is in-place
+        old_self = copy.copy(self)
+        self._keys = _process_keys(old_self, other)
+        self._left = old_self
+        self._op = product
+        self._right = Cycler(other._left, other._right, other._op)
+        return self
+
+    def __eq__(self, other):
+        """
+        Check equality
+        """
+        if len(self) != len(other):
+            return False
+        if self.keys ^ other.keys:
+            return False
+
+        return all(a == b for a, b in zip(self, other))
+
+    def __repr__(self):
+        op_map = {zip: '+', product: '*'}
+        if self._right is None:
+            lab = self.keys.pop()
+            itr = list(v[lab] for v in self)
+            return "cycler({lab!r}, {itr!r})".format(lab=lab, itr=itr)
+        else:
+            op = op_map.get(self._op, '?')
+            msg = "({left!r} {op} {right!r})"
+            return msg.format(left=self._left, op=op, right=self._right)
+
+    def _repr_html_(self):
+        # a table showing the value of each key through a full cycle
+        output = "<table>"
+        sorted_keys = sorted(self.keys, key=repr)
+        for key in sorted_keys:
+            output += "<th>{key!r}</th>".format(key=key)
+        for d in iter(self):
+            output += "<tr>"
+            for k in sorted_keys:
+                output += "<td>{val!r}</td>".format(val=d[k])
+            output += "</tr>"
+        output += "</table>"
+        return output
+
+    def by_key(self):
+        """Values by key
+
+        This returns the transposed values of the cycler.  Iterating
+        over a `Cycler` yields dicts with a single value for each key;
+        this method instead returns a `dict` of `list`s holding all of
+        the values for each key.
+
+        The returned value can be used to create an equivalent `Cycler`
+        using only `+`.
+
+        Returns
+        -------
+        transpose : dict
+            dict of lists of the values for each key.
+        """
+
+        # TODO: sort out whether this is a bottleneck, whether there is a
+        # better way, and whether we care.
+
+        keys = self.keys
+        # change this to a dict comprehension once Python 2.6 support is dropped
+        out = dict((k, list()) for k in keys)
+
+        for d in self:
+            for k in keys:
+                out[k].append(d[k])
+        return out
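+
+    # Transposition sketch for ``by_key`` (hypothetical values):
+    #
+    #     cc = cycler(color=['r', 'g']) + cycler(lw=[1, 2])
+    #     cc.by_key()   # -> {'color': ['r', 'g'], 'lw': [1, 2]}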
+
+    # for back compatibility
+    _transpose = by_key
+
+    def simplify(self):
+        """Simplify the Cycler
+
+        Returned as a composition using only sums (no multiplications)
+
+        Returns
+        -------
+        simple : Cycler
+            An equivalent cycler using only summation"""
+        # TODO: sort out whether it is worth the effort to make sure the
+        # reduction is balanced.  Currently it is
+        # (((a + b) + c) + d) vs
+        # ((a + b) + (c + d));
+        # there may be some performance implications.
+
+        trans = self.by_key()
+        return reduce(add, (_cycler(k, v) for k, v in six.iteritems(trans)))
+
+    def concat(self, other):
+        """Concatenate this cycler and an other.
+
+        The keys must match exactly.
+
+        This returns a single Cycler which is equivalent to
+        `itertools.chain(self, other)`
+
+        Examples
+        --------
+
+        >>> num = cycler('a', range(3))
+        >>> let = cycler('a', 'abc')
+        >>> num.concat(let)
+        cycler('a', [0, 1, 2, 'a', 'b', 'c'])
+
+        Parameters
+        ----------
+        other : `Cycler`
+            The `Cycler` to concatenate to this one.
+
+        Returns
+        -------
+        ret : `Cycler`
+            The concatenated `Cycler`
+        """
+        return concat(self, other)
+
+
+def concat(left, right):
+    """Concatenate two cyclers.
+
+    The keys must match exactly.
+
+    This returns a single Cycler which is equivalent to
+    `itertools.chain(left, right)`
+
+    Examples
+    --------
+
+    >>> num = cycler('a', range(3))
+    >>> let = cycler('a', 'abc')
+    >>> concat(num, let)
+    cycler('a', [0, 1, 2, 'a', 'b', 'c'])
+
+    Parameters
+    ----------
+    left, right : `Cycler`
+        The two `Cycler` instances to concatenate
+
+    Returns
+    -------
+    ret : `Cycler`
+        The concatenated `Cycler`
+    """
+    if left.keys != right.keys:
+        msg = '\n\t'.join(["Keys do not match:",
+                           "Intersection: {both!r}",
+                           "Disjoint: {just_one!r}"]).format(
+                               both=left.keys & right.keys,
+                               just_one=left.keys ^ right.keys)
+
+        raise ValueError(msg)
+
+    _l = left.by_key()
+    _r = right.by_key()
+    return reduce(add, (_cycler(k, _l[k] + _r[k]) for k in left.keys))
+
+
+def cycler(*args, **kwargs):
+    """
+    Create a new `Cycler` object from a single positional argument,
+    a pair of positional arguments, or a combination of keyword arguments.
+
+    cycler(arg)
+    cycler(label1=itr1[, label2=itr2[, ...]])
+    cycler(label, itr)
+
+    Form 1 simply copies a given `Cycler` object.
+
+    Form 2 composes a `Cycler` as an inner product of the
+    pairs of keyword arguments. In other words, all of the
+    iterables are cycled simultaneously, as if through zip().
+
+    Form 3 creates a `Cycler` from a label and an iterable.
+    This is useful for when the label cannot be a keyword argument
+    (e.g., an integer or a name that has a space in it).
+
+    Parameters
+    ----------
+    arg : Cycler
+        Copy constructor for Cycler (does a shallow copy of iterables).
+
+    label : name
+        The property key. In the 2-arg form of the function,
+        the label can be any hashable object. In the keyword argument
+        form of the function, it must be a valid Python identifier.
+
+    itr : iterable
+        Finite length iterable of the property values.
+        Can be a single-property `Cycler` that would
+        be like a key change, but as a shallow copy.
+
+    Returns
+    -------
+    cycler : Cycler
+        New `Cycler` for the given property
+
+    """
+    if args and kwargs:
+        raise TypeError("cyl() can only accept positional OR keyword "
+                        "arguments -- not both.")
+
+    if len(args) == 1:
+        if not isinstance(args[0], Cycler):
+            raise TypeError("If only one positional argument given, it must "
+                            " be a Cycler instance.")
+        return Cycler(args[0])
+    elif len(args) == 2:
+        return _cycler(*args)
+    elif len(args) > 2:
+        raise TypeError("Only a single Cycler can be accepted as the lone "
+                        "positional argument. Use keyword arguments instead.")
+
+    if kwargs:
+        return reduce(add, (_cycler(k, v) for k, v in six.iteritems(kwargs)))
+
+    raise TypeError("Must have at least a positional OR keyword arguments")
+
+
+def _cycler(label, itr):
+    """
+    Create a new `Cycler` object from a property name and
+    iterable of values.
+
+    Parameters
+    ----------
+    label : hashable
+        The property key.
+
+    itr : iterable
+        Finite length iterable of the property values.
+
+    Returns
+    -------
+    cycler : Cycler
+        New `Cycler` for the given property
+    """
+    if isinstance(itr, Cycler):
+        keys = itr.keys
+        if len(keys) != 1:
+            msg = "Can not create Cycler from a multi-property Cycler"
+            raise ValueError(msg)
+
+        lab = keys.pop()
+        # Doesn't need to be a new list because
+        # _from_iter() will be creating that new list anyway.
+        itr = (v[lab] for v in itr)
+
+    return Cycler._from_iter(label, itr)
diff --git a/venv/lib/python3.7/site-packages/distutils-precedence.pth b/venv/lib/python3.7/site-packages/distutils-precedence.pth
new file mode 100644
index 00000000..6de4198f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/distutils-precedence.pth
@@ -0,0 +1 @@
+import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim(); 
diff --git a/venv/lib/python3.7/site-packages/google_auth-1.30.0-py3.9-nspkg.pth b/venv/lib/python3.7/site-packages/google_auth-1.30.0-py3.9-nspkg.pth
new file mode 100644
index 00000000..baef7a0f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/google_auth-1.30.0-py3.9-nspkg.pth
@@ -0,0 +1 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
diff --git a/venv/lib/python3.7/site-packages/kiwisolver.cpython-37m-darwin.so b/venv/lib/python3.7/site-packages/kiwisolver.cpython-37m-darwin.so
new file mode 100755
index 0000000000000000000000000000000000000000..aa9740a8bc8f5938cd1d4279d6426863b8776c19
GIT binary patch
literal 217624
[... 217624-byte base85-encoded binary payload for kiwisolver.cpython-37m-darwin.so omitted; not human-readable ...]
zxEmgwk)Uzu3;Y6ZANU0wKX@E>`v6woeC&ir^T87ymY+&Ddo&-g0F6KUSUxI=g^WLY
zcAKbpbQ*XbXHfwu_v|iF0S{q<7~nBX5Q76WI3?f#9ffw}7XY0OAn2e0I*t=|^pb^V
z^AQVA%PXa8eL>4=Q3j?#$4VN53J~z}nZ{0VU2*|5dWt&o4C?Q7-hYwM2}<_QOU{Cl
z-G>*JYe7jaM&*QO^N|yvA;ri4xl8ZkE3<cj%WO$db+O*V@^G0A=sa311KQa)EIb&0
zdq8fXxCB~60Gl1)Z`B1Yg3&@B!Onvm%XS+&f{nBu2b2R;JSVYuSRO97^T~c~0jja)
zbTBY@9DL2>k$v03gYmE@<5BpC_5Q#A|Gy~fU|@K;6|}LV^ZSdl?F<Yr_T2gZ|Hbw@
zSVpY9KxTE`e=(~aR3gIqi{SDQX}t?*O)Him>*7GjaP<TaQpT(K1spZ_1sybyP6)H`
zXg+8G8o%BKT1Dn-`IWz=9z3&Bdg8SgXjs&zyYvKj$QtBMP!T_=9aP4+egLmE=q?41
zZ+`GR?g~zxh@opxv3S+T@)LiHKDZPNJ>l6~qH+RsB<l%J(7_Uxhf9J$vG4q!yVM>u
zW(t}t>#kPtIPQ$z%=qREYTAMmfk-=~c;^AvE5AT9PTi#!JUZDtj)U6g44&Pt-~sX%
z;34x)7nKK|-L4lryM1qfO1~SR;^G2moE$Wy{sBDd4BlSQ`k%kW`2YX^j{o`jTeZMP
zx`9j$1s%4+0ltft5wr^dI&K~T8s2&>3GUB!-ha{125R6z$IDNE*G41S{X~wJ+qA*P
zQ@cTF6xW((@cMdq{R6pE5;Fb_UMh$fe|7<7aisC*JFTD^;`xg!t>D<x@a%Ng@I3Ac
z&h4Jvt_Gf+?gpOCM=d;?k6VBTnk5-QTgu@B&Ay=ijxBhg*&JjvzN34&Knf&YWJ0Y>
zgjh++khgDlx`t=Bs|IL=Y)dRC<XfNcw|Iafh)0FL)ebyp?V<vS8WoU&?h+LN(40mL
zXejcvEG$2P8oLi#!0`ha?|wpF{LE>A#m^e>gwzj=rGk+06>$CqwNJl!bjN%6bh`(5
zbo2Rif*X0R;Hng(i;cG49I3r`$il;thZ(88#|&!kSspIY@UZ6g<afH^V|lnl#-q1b
z!>700<Hd)U;D)nD=RI(%XCeG7r;p8`NausrQy$%-Y8yd)5yYrTx9J(s`sQxYQV`X7
z{KfTV(1|Fp_Akgk|2(=`--Bc!?KAM=IpW*fvCWWd8`=y?hTp;Nvu%d9x54Q-B-o?#
z6R26=TW8_fUB>Z3=r3sG_&8`<)Wh;{sWS4p$cN$U2|-;j29WnG@0Z*KpErGL8K{j*
z?s?N&nn34GBPwiYd_vl%@7IHZ?Eq*3faLaRbQ9R;FM^vOAz=@p!65-+qqa|Z79q7y
z!Qlg4|7hU>&f`Aa<sKf8#O2X>$Vc;233=l;o|Zgzu=M5ITViMN`ktrdG5(grtPBjE
z)_gwvP9MPh&8(mfkp^gUZj7G+_)>QR-|kuo-|kce&?M?k*cLXQ&fh+oKS2APkAd1-
zp8V@SdUpG9_<)Yv`UGCTk4WFWHbEYiJSgY4f%h9f=5GPb#e?)g()k@9%g6jJN5Eqf
z&Y<h$KsUt2fVRXqfVPc6_Di(=U;%~jE8p(i44>|z0#IGnDgy5RfKJe>O7Q45_vw7)
zq4^1P$;`n&ES|kSDvaQ{>=!ott%|G+4Dd~+{GOWMK=*eZ{LkWf@E^0M<vAaIr;i>x
z|3d(~hvj)6e&-M1CFGkxtzht~ztR(+uxh^tcE@+{UexRc(2&(}kM7bFp!)J<2WXi`
z=WkGh)f}`Q(UX7uLC<bK32^uv0*B895BT|ZCZO;E?{9#NN5_EnH-Ju|j|ZQ!Y|ZS+
z?*s~)5`T|wP`E^Rfc8RpfWqWJ8o%B_{<bGz`%uCJy4ji8quZOqv(p!}(44`u`KW+T
zXI6%1=hc@RK??vZpYyk@U}j+O>~#fA5`*>`U3zT*>ZX8C>GR_N)ws>a1Ux%SGZ2}D
zU!DQ9|JfI$I}2n?w>#(nP*3nt1h+tYR{}sqpeO(Oi=Zs-(Rm%qek7#xNWte390a9L
z$a$nb2eIr&x>?c<y7R>YzW*6=z5;lMC~Sxkbc*XI{>X!%%VHeBw*`X|j@||SHXl%(
z0@=<CndvUg@a&E)@UXnc-^%+RRG#Ku03Ckd13H!ClaJ;h&(5pxy~~{kJr2IK;cvYR
zUQKhD(5~ejkks>^--F-zhDYmd@SZP^&e9bnb{?If8+<xlcYx3K=?9NVK{hjM)qw^_
z1$<9<!n0T!fA}RI(A-RS=?ahT&<(HA_c_~x*1bUYIooJ>_4;TCfEIdMf?6%0lQ?`k
zkNR{T1MOXW@w*mOHG)PdK&O?JuJG-A0Wuz8*(ZJh-xJ`)Af2uWo}IoKo}IA;K9<Ky
zczik!cxZyw>I;<q@&#So%m}U+kNb3<evwxTDlWn=d3N3e8}RZeXe(kTG5K&gD96G>
ziiCU!UQmsk4?$<Gfiv-QmKTp}Q1apHWYBfk2cNKb9(=~)%lOX&v;@PO!@JW*ox{76
zMcuzQMI9VweBi;y&o3_5fJ*IqknP|V2|k?(89to_1)i2)ONzn!z+XViZ_wa$>95yu
zB${80+k9ax=7aZ_=UxC!J$!$`SOY5e-oF5!9E!SE88UAEsTvf?+=%L5eI=+KL$3bM
zf>#oYmVv0w<1cPhgZd<(y{9kOs~H$xe}3`h{Qv*2-@N!)#lZ0T>5FF|-n|#ELA>iP
zZh&|fUbB029)8XDngOyk>cR^}P<h>X|HaNKuwk9YU+lO3{~vU&>2_<3^YX##<vg0-
zfSODR9^K^`o}k4i)}SJ&^B?%ElulRB!E(Ib^T8QH6hwn|Q@?ie=&oG>TJGN)I^l)y
zmH(g{YIs$lsz5YIl?Ql4&O^Yr+bzMfyDS6VegIo@1(e~s-9C79`~C0$t#g6y0`=@H
z%K(+mzTI^Spd}8l^BYh$rXBL=uH69gI`sTPP|gOO-|*U_S0viQ(gu8f!(|V~!&uL6
z@Ui>}DX+jI&(@5d{7&b=V#%ORNOvWuwsV&NpAQMz?+HFp_5%1sS-l(lZiiY9@V8om
z7TECX-QaK22kQ~{=&U{9(;d#?V|j_cg`EksF9>v`B&)_e5c9xmq%)2tfVPZ*)vblh
zs(OLuGEbMiaOsZXaBTj~QgY9;+pWT*Tfzf$=1k`Y56v5(^BaGlo+o?Uqua`}^T=z?
z2|k?<Ks^Nw&x>#Tdv#L0ENxyFzxMoo#gp-fS8vQ~jPe^)+JIIuiGUWVTQhs`JKY0u
zSwHa$1Peg7gCF8=3k4O$-At~XH`Dkv4!v{(aXVd4xODnna0Q*3YXB-W!27^|fKn~!
z>?cTjoX~j}K9)Z~=abYafGRFX{q@4f@+WBjsUK**6MR8EKWNcT=l>V|M?n?Bf7rh8
z2RlLB@8CQ2e!Xbg4VsXE?+XW=6gmMm;R33zJ-hv23mJSs3mH%n5$LS637{nnnDcGl
zJrMUJ;5cuIzvagtkmbG;JghnRn_mC<|NrGTkO=B-Z`jnwaaZstG-NLHYB{K61I>k=
znGRwdf8hz5fbBNjv;<UYh(?2`&igNpmV-)A@LcGYa!`Tu{l$YaP{u~C{vqR~;Pxt>
z`*Z?8R+4p}j#xR~`*c9dC!x2lNWjZu$bBnEX2I4=cGhwDf^K(#Tpq*U3fg@Ey6OdV
zuZ<_<N*n&xFi`Kevn~L1n!jhaUxr8XK>_gmgGckh51<+H@1C7*8ITZcFJoZ9bpjpq
z4kGZ~Je_V8pz#iK=#jzd9?)rV<N#zqogY^N8QppRg<Kh^1BdOhAWNuen%GPO?eB)2
zmj%8r=vpZ#itoQTSIWTPdAtpDswrrpBWTJBylWS_{;w1~UdP|^6x>%vx<lvXf9}$!
z_(tjufCp*vOBonmZv}@JsDG9Kvd!{7f6FS+&<5yEAhVgMeYA(*frZ<kaRYn<3!6X+
zBwh%Bj9&&B?*#YH;NxYDZ!|y!LFZ@iSwFpd4M62Yw`n+Np?7bdgHPw$1E5Aq?^4hx
zjYl`@I?w{<UJnV6&fX0mw>2Md@aSv>>GC)ZKDLH|0hH9Ay{In%1swS7XYfG}$6XJA
z8gstAbrL?EdqH|Vn~!DqbZ!Odg;~@2{DpoAXlzvV+(OW?zo3(YAT}Rw<$xVee!LZQ
zj1I^ikK?Tp2vG$n^LVSq|Ns9bKy_*FR*>sKBaSaXr^SI5l!6CBw}O20!pQ?v8iCz{
z?hde}-@tY<mhgCV?*-ZO;$$&sc6cvXb1#V9ZTf5hs9|u7!=o2$&WkLNFeDgXbb#2M
zH#{1TfWjD*WMW|#%^rU33aS9Z!3*iYI*D_EJIKghhzqzu>~7N(G#9)m0y`2EtS{I=
zN;>bq$S4ArJK*$%8vo62Bs@F6dv^Zy=rt9d%)qdV6C8j4Up#dNwLA`cbTfE#7rgLb
zya%2!>-_%03S@q_>0I!l<nJ$3!CcVc3H!Jh7#RKsS~zwd@c>Ql78Zd{PTcneG@zsp
zQq_6?g)WHhHQhW3$);*tHeDzLm8Yi0P@8sxx!7!CN3!WCNLA<k7l#W$1wd_uPv^fE
zD>*=Hc92JWK)2+8<{9q4C<LkOHoXE`P~3U{MGTmWY&`h%^9wIl7BVpK@B0iI$;~JP
zCGP(((m-l^O}|ZKVA#bD4X`avI0B3lq`2EO2fTp!`-?9HAYUQd0}C*RLPUW51F7n~
z|Kbmb?lql)Y?Bu*o92TQcbhUnZR!GZvDtL30AbS<kgCr6FD4a$0x4F)x4Trq$MSQD
zxlgZZD`+`H=Y3EZrgAbc>;kRP2QLG7_Ts1`sQ#=L0Lgi>gE9f=y0_1z7r+C|1uu{S
znge7*w`n$Lk#y(%7oYRd0{|M(4h8Ul{+|yC=)WMfy{2;~Ffi-_B`;7Uc!O*PEkHm|
znF~OQyG@zrg7kiW(GBKe^ZxaGg!iX{RCV5eF&RYnn%W_3syzX+?XClkXf*<9=r)}S
zUZ(v0g)Eqh&9+P=+tfj-I`6+w%?FJYmnQgh<{khIzxZ0d1D$MDQs&c}igZ>nXuKRe
z6Z-GPQ+81Fz|t_NlGvCB^5=7pZd1WI3=A*k<Y5LYNZG4ANT_y0{Ie9S>;H?zd7uLJ
zn1HY4*U~wdaTWqL&2&3xiE-!s7xrK-vhSdA)&n)Y^ZyHPsL7rnwY{eQ`;hYZbbB13
z_$C+RYE%E&AisQnaXlAQ`sW@1T_(|a9KA%5&qKu9U697k`!8;T=w8!AWZN`x*;WTq
z+->@B7R0tpFc;ZZu&lT>7hzK|NLA<k7lpZ?(VbcYP>Od4jrMiJV#A{sQliL%RCb#}
z*Ehf50&|g#hXz`BE+k4i|Gy9edARfc3lWgoUehbR3=F&Ype0I@9gaZTo&ydvDX2FV
zfw{=`z`XG;2jPu1AXT0BU#tSry{47OHvO>0VUsUNakuG)nUDap0CTa~)PiJ_6G&C(
z{TB{7phTYP;oDss;A{DZzhyqiW4*a337i2m|H{8DjN{uO#u8zV?l1w5?lOtj0`UH(
zNAodG(Av4~+7mAVFx=Vs{>92{kmKHabenR{1g%TX1_chP8;^meA|Gah1LGU0KAI0w
z)Or8K+-y)TIAQ@BfNL!Rw|zk8%b`cC8%TM#>B<?PHqHGPhF~tT70`&S%Z5j+E!b86
zUs!|G_L@reFfi=m2Km49|BFr=9Kn4r3*>fF#~C0WeSdKh%tf{b7O|obzkuu3%OF*t
zwU;2e*K~I`l1=isY$^dM?l!$X9b!{_7AW79;>dT)v%nz-wk;K;vGe|m<SbBmB<I^*
z#sR7=WD)TNsV_=FgS^d$H9RfvmA=6&U%q65eHsq#LEV3GAIwGeDKtcNvfv^5Iuqp9
z|1Vx<f^0^QqQzhnOxd6Y_JO%r47{BQH*glzz!@O5sQam3tG!lvt@K*qwcKl&*HW(~
zUW>gJc`fu>;5FZCp4VKc2ib$#I;|ihI`6;up8;-Y!55c8_rC-OID*a*{q4~`6?BP<
zM>m_{$^QWgu$^bIhY>xR385aHPr>~`(ELS!XXiJMZh4>Xd=4MW*Cio7y|z7#3=BTK
zs*R1%`6g(k{2f#*8CZb_bDRZ0V@8)Vpf$Ef=W)<l8euC?s=E)mcwz>q&EI@j!qf6@
z>3-0bhi*`X{S8)*g=c^xXBTv>u@jh!B`o_vz6G_6zQ6Fx07cIC7d{}hsB0QPqg$Xb
zN(4!PO6+uS7{TTnQT>bXEPu;>@ans7s8!+wk8WdFaUlXPE(8$)2rVx_S3f9P{{IgS
z9*@rB9=)c!I~W*V@LGZvOCJU;K?a3HAIJ{S;k4iY0M(aK=^&qgs?}Xn7#Lo-rh~!)
z+4GK|;C%jKQaU(Od<R(+0M_&Ug&#<9^I-uW%h&v^!Jx4v>_HI*QrCI^#f3C*P#`52
z=y(8V{_q=U3N0CQ&eso?atV)K-YQUK+--6Ye3(DTT1N{|8_d|J6EZ#cA`P}48lHYU
z!0r35;Pdy{J-Q=6N4pDv@@utEXYC1}UR$eraB4EIXJB}(2ksxh!W2|wJ}^fFASel4
zFb4;ofT!iX5+l$l>EN>kveJ+f+P@d8K&s%eRJsltBd^zjnmC@_?4X@03LxVR(;zuH
z*}=f0`3G|`$dh|Pn|eT=WCItL?_b1#OzSmWT?elj-@ouM2Q7%<2hCV$fRg%)R8YZ;
zrL)otlIS-509q8>`Ta!&n2Q>j$H1MHBdOrn1Xm7CAXT7M3LqM_T!5wJsUWG&`!D!Y
z(Ib!nl%7HNUoT)_cyTEO<Sy2UOBonIOCsR+UxN-~iT%I8fe|{N3)&)()5*XP`!Xd3
zc{dcO{Rc7+Bwx}6l6S!=9|M+G!6{$T0n*QcRUUK}oCWA$W7xUOy{3!XpjQ|k@aXkD
z;nVAS!Kc^vhT(x1lae9#7&d~3fMY@97aShewyF~u7)nGux=m#!GBCV&3RMIhBaIdS
zcfGr-K`MVw0F6fr#(pS~^yoHyJAr}W#kORSkU;DQ(5wjP`aV$F5&)kE2RbMNdKKcK
z2@DKiJv%{qnvnGbfzBr67sv+L10GJUhwAAA=~4FRHf@F2n)~8~Cq!fJi%<Me8zUjQ
zodsS8cpzT@4>~>*)LOWj1e%=DW3_AtU2quB$iTqg1=90kNfIPtTZ<qj$cnc!FuVi}
zPk>?!wEpK4f8-6&)dAZ&Kt7DU2^ziq#2<g_6Mv-aDzI3@txx;{sSiH!M?Uz(AN%kV
zfBd6Q{E-ho@kc!V#2@+M6MyW>PyF$(KJiDqN#oZz`oalhXXoLUjG!_c*|nhS!a%N7
zhq|`Aje((5qDt(=zeK1jK^OOc99Gx{a*QD8h&_SW4@e;lT6fIfIt#S$YaM8G2Vp#D
z|M&eDXA&U+VbBH&h)xDjKs19^Y|n;RQJVXSUqDv~Y`j41hnIP%hLjq3bn`v|EhGkA
zM+v@P@?14&v_f=E4Jfr49(duF$iVPgGmT&4#><o78L;~=!l9P#Ze?I7)vMBZp$D<7
zM9T0sqz1JBUtXvIiO`on|AH$)*6vo2F9l*heBu|#eetsB-~ay;CQJa=ub?~LLA#@k
zKqhsv{sawbwI1My=TFG`)(;7wqQ6=Iv@Tb~<G8Z`187X?KYuAZDF6NsFz^74M}ii*
zA3<F74ly5eigoM%5<bx3)ScJ<UjQx9j6DpxSp_2B`NIRMgxBLZ^!D}t{3T4Fjf(CZ
zvHuec;O!|;{zsJOjc+_ai^D+UHQ+HRpKjCDeV~3xoP$T_+!f%lI`DXsM>i`ID5N1{
zbu&N_0U4_UjbwQo2k%d302i_UUR(xw6mbCqXq?Uyv=0irbG;R$7dlo4)(bl62WnO4
z{}=TMpq8Vm9LN#ilfIXN#`Zjpw|@8szhm$Rg2@0Kr1L-w=t0KwK;v~jy?a4E0B!Sf
z1zl18LIC8jUel=+hzjO~k0Ge+2OarVAOUL#aKS3A*X+>B2Gn&|GW`GFqqla0NAFgU
zn_dK&{Qv*WbpzyH#{=M7DmT7xj|Uaehr!~YTPhE{cnlH&-#7&Jx=-g;u(v@g@H$&T
z?(^vc`|%sti;&?#khfl}0%?Sf1LlF)-KNKTKqDjI;XzO^zVHSK_kv^YMJ9+19v%cm
zDbnzuXXkOSbEH5MGcQ8o85mykzvlAnJpS4jM8tb`9tTaUdUTTLJ}!`jQ1_jS1Ep<K
zbu{;_1_?vmcQOvt5c>Z@I}TjOp|sZ^=Tm|jr=7o>e=(NWH~(TS$!`9|#NP@&(+O14
zcbhH*4Xs&zFX4T0H5O87+yv#I-r5`B8K6%uF0e2#yu1PugJ>v#Yghu+0KVa;xAp;4
z!+miEhL?*G8tkDOIA0WlHI!s|^wz$BDi)GrV0c~W(al=e1@f##^DmZi0gvQE9=*Kz
zpgm39A_rfyd-RIVssKrYHpX~JfEGshbUR3R^qM+VK+B{}F$@f^O+0#QAAk>Wy7i({
zf`Q?+0>~z)zzvX1{Gh8)p#tilwK1KCA;lBq{7aPZgO9(1+h^CnOX5MBjUX$Wdqw*}
zPU_`-Fbm`aXytpt1GFaoo8f^MqA`%r<Of#)t{0$JX<LB?*kD%)Cxg|Lq<|~l*4rh@
z&<e4(lYyZ`z@yu=q?3W+g)Kx?u1|Niz-ungy`U0-f#HKaWRMSZ6A#EPP}`*iQoS4o
zr@a3Rpe;W>o!JsEK}QgP#kzU9Izi!xeChTF(CkUCs6+|K8-@p7WJEJCypHieR4b)&
z9^Jf`K<j$pHNqZ<o>^rK46jdt>a*}@28J|G{<@=}>v*=+gK8eo0!RL?aL^sYilAF+
zLCprxs#I8ofP5q&xcmgCR{=<R^#C6&VahUxfx*M_dkN<Yi)c`AmI#6(6q<C(VG;TQ
z8li2{pp1M(0JQoHska90-#KD2e+j6y4mSTr6o&cg3ZP64vJh&%nIfs?yTHw_z%+lk
z3Yz)bRPmV)uJ1g$Z9jn0+c(2YFXW@ZO@<O_@Mf|9j2@jHpm}v?3I1@_({&6C{K5_r
zkbNJAafR;^P(vQ#|40n~yQ`tOZ<#uN_mS9t4hx1X<j@591C$Sszc66{rA|{_&~oT*
z(Q9p>V*maNg(&zWEq@fKy!-ybIufJ2gM?47sc8`d!wdUJkU^|AAcMe5JF%R<2uTkf
zotHsLuDgoEqnFiZCP*9j4s^~JM<c*NA^}=$(|K<fXdyQ2O6`~2F`!;uHHSyHX#z+O
zI1H!Y(eMgf3{-<GF$ZaY6#)r&^r(RfC{UqmZJXT+EfJDh85mw@Lo`W&Ti`yO-}j-o
z%^!5{FH-j9@#toCZe?Kb_|NcK3zS(`g+p41d^HRVr3b3^f!gpdUettxn_{jnK+E`6
z86JT7^)PDwK?yGz&|n+PD^tTE;RQb5=^JWz?GA*8mjOr*IJ{EuXaMat0+)uptn45S
zFiQ;Z=uzVW#U8l-V%-7_FQXO)h8MrXz%IiPUi*1L+7K74gThM~w741;UR7a`@VZxp
z99}+Q1i}lc{?Kp)9hvpX1GMO;cEbx@P`3<J9=-yNY;+#;=w&@M4de(5Lr}(c3j?M1
zBj6JK9=Q4U{Y92BXx*@jiU3%2J1C!kT1}nDUIdu_{}0&&d?gH&UobWSZ+LMRv_c6q
z!z;pZ^{gXkkEcs#wE^f@5sClIKHaq&KzDVubb+ku&fNhzMpoLV^Z)B&c>5NVY6`+Y
z^XKnNBFQ+9#v}~1|JT*<$!l}Z|2!(C+~8APd^(^14=`{weB$|^uT<`}5NOGfPiO6p
z7p$NF_|I3$1+M8kzyA*~fNaCjKx%=3#uQw{{)3lZ*@%Hc=QzljCqp6IGC_w|ZUC)J
zsPyU11?}bcw7gpq3qE_O`RE7m2_LNoN}NH};(v}(YY^?xU7_G{+yQ-`21x3g0|#RX
zFNo#QT_N!zB@`4}wHIDQhcYnuSiUY@4syFkXY7s_o*?^KUo&`i9tV3>0?Dhdxj~NZ
z7X>>O<Y^QS{(q4g0xH!&YO+QD|3?g$f%_xi{sm|~`DM^WzumSkrhwKxJprc}kq~fI
zz~2Ju?t!+Rb~_7r{AU22ZVD<RKsCb73eez*u$zQOFKa4jt9>^lvE2-YCAJb<&`v2(
z8=ql6s6c^SEN*z<#VW88NZKo52f17IwItYcSmF`IOAkQt1zv*z^3*gDNNn|qKm&kB
zr4%!G3ShEm@&EsYTQDqmd_<6g2XY<_IDA0kv7qy?2<Eq_$)Fg71i`f+SP<~H!1CKK
zeUKX=e&%mM&TkJ4Vfig37)O4~1&#Spn%@|L$<J>`f=J45Q$YbhOn$o}1c|{jLZHwA
z=eJ0ZRm|Y-PAtvx+m}F4Y}H<P@j4JazugREV0eu;zsW-!3re>riSz%9uK}<G^;-}*
zL1E2r6%!HpZAkz$zcoP%aL@?~5dt2dI0qfmu-~&2oYDS(H_1a%SwR3SmEp{2j$kE_
zL`O+R(-nk7ld2#%n!s5VGo!J?WHB<@U4K}(JQe`0X$6m$BIhp)$IkDd{o<g}?_O3h
zh%a3Yzkv&*c7JH#RDnl9j__-^9^emWwJ3o!N%w=+W_fhpGd%Dj&>s{e5dZSG`~_EP
ztOxo*aSaJG8?b_7;5v^Pyn*leiv%5zr`#YF;eD{4+6ACXRNeJp8S^yA1)Ufb;er=e
zO#c4|M-o~^7ze2b7l7+Q(7^&e-MK43^`Nm&=YOBh+7&N~KrZ<Y+NHIGfq~&A=+q#j
z@)=a|1o%VpYY7?kqliB!zk=flwMhN;+5#=zx*C21t5ZO(SA-#_64b7EamtT@0VH4Y
z0CE7a1*imuFKIgdLWm!dG`OHN6F(?4!8y6#4^eY2fYqEUJUZ{A*PI7HEksbwc>r2-
z9>8659)Q=JAVm@{wERG^TYKPzq8}nBFYxG$UGah+WIrq?^OcGtDt}158O{fGD#&-G
zn#jJF0aY%RNV3Rb2pVMK^@XKpVLq(s8Pq=W?EDJasMT%T-;2oTyM3T(IS15!?>y|;
z?Fy<`K|O282pE4jYdmP}k+8D_Xc!CHQ;Y%a((OFv@t?bt3)GYL=)C_wzyQ3l26X(o
zM=$Fw(4Y>q3J&uDWdqdqUX>=uv#_jzT>m6!!;<+EkmE3u`Gyy7L4#_bvIs4i$EZNk
zIZ^Gs1|LXzCa1mU;sZ+0M7Q@uK><Ka{S(3iNxeQipwI!Q+MC{pR12zq_*)vG?N}@+
z_W~^CUVx@t(69`VZN36;aLT>#BHbI2ayNk6e4!vW@PdxrI{soKH`r<5REk;VPKL?y
zm8v17Zz)g;E{DjWB^prr_4R_K-(YUk^b1;#eBGn-vq!h>jV?s8W$}V0TO9S1J+yuT
z%}04aQ{e?qSSrM<pT7BY{xUr9Vj)-wBvDdQKlO4!qOp|=Us$EXWHIU|TTfVcICG(f
z2crG=9oE?bop1!MKtPAJg4)Kttg+xhFh~`1(*x#he(1Ht-(UPy1-T32PyQBgR}|XW
zdaMr1eCeJz8h-_P*y|Y7%m+Ge57aRMRWfp*&Q=+sI)SWLX7D6G{~hrlDgR9cg#j`7
z?+OPb;?8h@0tlS{B0*Nsy#416&3mui(evI-cY^Idh-1;}QBc16>IO?kzu9rN|C-+j
zcyxXSjgj2&=(a6tN93}3ZqQtYyVvCmUIPT_b-f1-XF)StvKuVbmDqmb7jV4++I;Gw
zA^~n3`E-H?kWIi!Ac>H~%mx{{f!-qvDq3XNA@L@{4vse*h0`}Ss4PZ?JL3uqs7q}B
z|3mv*SjNwIz+QxopS8MzM_oiA<7bAKAg+pweR<9UJ{Y77(f}R|3df~-1L%Y-&?s0h
z>tE0)7HlL)4VQ-ZAECxu+nN+Xhk>*~gMwdNzzz}zb%_OhI=>r!gF8w3D@YIO7!W^b
z9W-nVsLTa226QWffuZzp6=;{P>x&m2E=XfQYmvsx5Sf`De%=z6@VWyUW`cPr6yiKl
z$oLsjc-4W{7=l8e+w?O?130|Yaj9;23-Sthr130BHO%;L&S2kT8vp(qRD-oGOCdD8
zeitw>yf^~Ufi1kce}VL%hSz=25@J|*xj93^YgIaOcu6=@7G7&W8}4BqQiV7VHa-jP
zRy0FK|6gXif!qfkpFIH50}iq;PT&y0s=@0GR6{T8bdUy^B?s{6X?qXVV{Lmn9~y9n
z@);OjbVD>D#v4)Gw*C`H8*11s2CX@Ug&mg@B<%9ikjESEI3k4|k>j&9pzZiDuY7fc
zcps9!zzdWR=^wNP5t76vfb@XF>j)kVQm;Xt0H?BakOr6~6Y%J1`vTQtZ96*;8eWs~
z7#LneLp0$CFZmxJZK&Z@3|hYo3$I%aknplgMGmhu4*0_hbXT3nf5d&9pZEpbC1PK;
zJ3!a_faJkqpks+Y@kcsn#J<dO04?Z)umAFBeq(?#9?sJUs;6M(p93_!dqG#$-2+Y3
zm4Md9HXktn&rpGfx4TUhKzhL89Rkq+n&Y|w?MPf^WMFuG7Ob+D^<M*$>FNl@C!mVi
zm>C#eF90jHwzbTKhOj{{1H+4-_F%V&fr~!qEa?k1CI*I=xu6wrNV5g}9^I^>poPx=
z8D0j0<T~%asI-R!@ZDqvhEmxoX;5W&;Dxt6B!I!)L3YCfFO@*bduw03UWAx$LJglc
z^+-NQg}4dSZ@K`T(hC74L$vT=0QnjmJ|++i$l;?5@-i%Z?jf7bj!+D`?iLjK$G-gk
z4-X&79BB9m<S;P2xM_zae9D+{gwI#ddSRsSiL`@+&)y{D@X@xzA3mV@0Z{w>nrG*C
zk6zXbb)c}Za5aPs={nkh8y6+Epyh1P4R@%+vce!0oz9?vkP=qU&O5J#(7F!bi}O&2
zV1@tx`wwaZqjpR|>l9zw!m9Z9|NbG3JtOKb(0PFYps0TUnlS5Sm4`X_KX}+@f-S_s
zHXfaaK~0(mpo>3|8#Kuv1qk=v$Ku}qp#8|;<(Tk6<HUcsTx<?^u`Qa55#v=z<K;VQ
z5iPeTHek2%w-D$b*?<ocfOHf#*dUs0I0nia!Ai(#vVn$a;C{~i`wu*=k@^?pcU<!b
z?l4)5=9;VxEH0J*{`(J`zk{W>>yYS60Bz3ihNriS*5Dw)lin7CR3PGysPwk@56B0{
z&=Mvny;WMn+*tpIxb(Ijv<wa8U`TpX06C}iKnc$D_Sp*5LxblJBGcQ)-?&_S+zRI6
zv%gVY3`uX`_8n?@_ooV!r=j&hsTH`40hM>K;vIZSI9hop0P-fdymNtQ0F`$aphf)W
zr~m)M>w|a5ri&vK!|DUGmq_KEW;(RIQ%GlEc=6H_>=xAefDKoDz?u%8J%-l@nU;|9
z?rbb_d1q~jzr4c|K6fgS!Y35sCQ$gm7V)TnCQ8x5=L1L&IDAwf8bIL#TdZ{X2~zl+
z1F3{HOuk!yeGF0zTQ5?^gj8R0ra{ApA&r6I#Yu>2c=&+A|HZS9|NmpHFQ0-|8Y9&Q
zo)(bsSsQ~KKGGK0!{-EYeF<Lw(p&ogv;;IS29$uWz4&MF|G!7;ff6-PRRCT(bqrLU
zshfhjYv8=aj~HZp&FpIU4Knr%SqKB}|NX?3s6Uy*67`QCprQhDz5uxX29@`qwdtV4
zVIf6^E@;IQA_vYhhZGr(9-W8zHC!+7hqDI9APw?E`ms46Rfu8(Pd1c*pSpVdMb-~o
zE_Q&s*zL!^|7iQALqMyf1w1>idv<>D=(asl4l45@(-I%dAg*=t01e}U3Tem+DbP5s
ziwbCpbXqjX@!)~n8$OWZ77v4`-N9juG>{7qUZgJe&hNM!+;0X8;;G+3K@2;83{oC@
zG`~T(R|Ltu4g}r%EDFQDk6**n1OfMQ;&ktQQ<!_7enWOIIRApvCuFgVJZKv~Bzy6A
zbY6Zj)fAk=#5}rXuYlG+gXY9Qhv`C-A*6R<1kz;rz2w%5G_dm0i{SQZFRLO*0%F>}
z7hVv#tzbE8+uS5*-b_hiV0fVmmMn1rw`D=?kry7_wzo>b%hy25C)iCP5#8w7?I+*?
zUl-;2;*%qPq?-h|S>n-o_+=cZxr%7{^0zpH1Um1(=rRFiR6*Alpv7!2BN!M;=T*&l
z5n}=|c9P-km&%~SN#Hv|P{u<*IRz3v|3SO!AwB|!PpJt!e71nrG-C}PA<)V?aQL`^
zmBYh_5hMXIZQlzmh#V|@d=sJJ<B|y4Jp-0R3!hz(<$a*=d2S5x2-)G2WsDR)C&QuP
zV*@po=<vA@+V~Ff5jcE8jp5-l1+=CZYxsNuX#$6j8dy0zd>(=%Ag1km!3B|ng^z9m
zG<;MN7#Lo>H3HjH`H5e^8+26)=uoi}rSes>9^I_JLCbHuEj)Tn_xdp~yeKgOrK*=D
z|3RzO1${vYt*{s*1a9%pGXe)Be+%fgAy9$s3tmDM=lTLU<)J1zO;8#`PIP=AfzJCc
zM2#SIMr9bH&Uo?SxgjX11t5#E*bQ&LeEIMH|1|z^(BW_gLDvj`PLBlVftLyYK||q)
z@;f-d1CkEQLE~7EkO7DDTSKt_1wFc1bwJ+z54l4Kw5%OGVA2m#Z~49C{)@w4m8I9f
z*|3+j4kQ6F=<ti>5IN8xwxA6or{WkGO4fRGn;wi~V0h65k(>*vmcBKBE*obo0bkOj
zZwT?_<WSHO(JGLWyFj7);-3LDEJ25wfd|q!qCs~7A^hXfYugGMl~OVU)rzbiK|A)5
z_XB{|cXs}MaUWz5=r+36|0P&A1o(6pOOSL7b-5vA&0#*sV#sa+Q2!mWH3(7vz%~m|
zYd?WUcfADof+SwhwHhA(kH@~W2m7sFBKD=J0rD0k$a)2E`K$mcfAFpM-U%9|1F!df
zst-yB{4Hqfy+5F>_wIme*aFpnw%*%b5OuwGI8+1Yi)OF}l=a?4!qD~JjiAO&H}ZP#
z<)DpNu=U=nKr>jN!0tv_?;Yd?&V2`79MVHu@BLB`ZN0aJ2z2=>tg#K+4-RTUb%H7-
z9P7Qo?S}-<&THTUDgStMvvS6Qny3sOovt@v=N@?+cU{2%UU(q7DhRYShWAV!sGafO
zr`L6bPp|I+_#$utJxCBcLDnunM>*2;z-hfC8oX-8bpfbt#|&Ps1-jL^b_HlJi#urh
z3MA_YlxBK#^9F*}P{S8sTYG@)6)pAv9R&Vjt1f8LpC7bd&Yi=j^FMf1-T#-Mtwj3`
zKwE!y=`k>*9d`gN427+4JMI8ECAZUE;y43nd=h-20^}|L(2@S2J+KTfj_ZQ#scitQ
zulNXIZ}8|XZFuobmw^FdcAOpqLl@|*z0i#wouQxuA$NFmhMor<3<)u91MCWp4iCuX
z$=#*rJvu!&Jvs|OcUVfmm)v1E>6>c<V+m-($AlN3K$ds2R)a%<!Nc11MyUW~upj~6
zb$;;|q^R@w3*C3%wisxGMkz1IkWy{W|NJE;uWi7)HJ0fzFn~l$UcDCf>^ugNl6<Y`
z*?9~sc-5o3cEXD$%pe8zS`X{M+D0eef*YTR@(EP_fZKB5J)w~G>pq}?G;m*~LI;!*
zP}Z-T!xAX0KjJ2eZ+$g3^WT7W@q^8mz%>6lXa@vn!voCxZ<3^%-vc*)hc<@$+hw5{
z3}*f|Ieg|r*JraDfXswWgT-h=a)PJ_sIrd`fZuboA9T?vbn!ZD`~qCQf!m**zoGjB
z-hc+;ApXa6U%EWH`xNlG4;sEG<GD5<^C2}J=Zmdc;2?laAApuf!pF~NgSu#-7H%(V
z0J5G&JbKFSg2uJMRjvw157-|8cr=)Sdcz<~tZk#ip)H!Aa0Z4K(hyCE@pBZn?FZj#
zhdO?491iZ$zXq=dpQQ<@l>hl6kDupgA~ijToNwR(9XtRFuU1V+ctPhIP~5ay9qRX9
zRt01|fq3+YgYM!2dB58fd@=zzycF<gU<0k$0co(d)eeJ(mr@u5!;9A%V3*+tum5bI
z5+60Zm_bXcVe<`H8j$ch=Yt$xHW~!O3$owAQWMmGISlHTL-#w_f=urgeFj=p+<E_n
zfhK6A5qv0}ye6dW;jIBGci`)>LH%=Relm4+WMFvVuK{v1D`-tR=&VlA<vyTc)L77w
z(#ZWy#QKQq(DhEetn)#8{z1KZ$iVXn&@K<q;&jN_>xVu6yE6E6W*{wY1YarCTe|@|
zCY65|Tb~oW*a@`0-J_TFEol4*n&mUqL7sywc6ta}snhxWMK24;1W=b8WH0i1i()p|
zdW-KMXJK3H#K23P^%fJ<$zN{~qE6C!3sq1U5VPJQ<vC;^IOaL1LIbb2cnR8-gS6g)
zX6v0A)Idq9_QH#DHS{$XsUSB%*E=1226h^bB?XINvRKwTwL|1E#)kjDh*brpc2EXM
zeg+zWfQ+w##!C?C7qq^IKb-XuXc-RZ@;XgW;Rs4T&!9_kds)j;K|X+t-hWa-B)ePC
zWGBXioa{D(TXdj9PmjI$!wgGy4WK<)*pgi*Xbls=HFlt;GHjWTIB1zqK6pL_v|dy}
zmHhPf1hjQ*7t!nMwt_rQOnUqB1d`s~JOM{4$eEQOtB}%L2`%<hh^T;Ks`kPQZWZ*j
z_EVYA`W}0TW3jC75mknzBk3o|BleK~5-2@^+xy@{3~qo{(e<)|PEPPZ6o$K%zzr0F
z6Gg(wAP+#2TDuY~so`AP69HBNDex#=+vD&U5@!~V!EuHoBTB(!F(!*XD8d5i+aqw3
z7JGdG%5R_*t}n2xr{e_;_d(0|PDPmS5$oyJg6Hrc-bGzcH<<yJ(ZoR^jV+@ofYvS%
z%xHwy(``^BKfjeIl2pFCg2I59{MPai5_dHZ!Ep!5Z=4{jXr14dfLg-cwHIE@RzS~h
zouFN>uvOStH|U&y0Cp^v{I*mcmXuaMz?a|P>*;z~l|UnXi2UX)k3YY`HdZVK4O~G}
zoVYwJ#o^3vFXTW<APJDt{C43!B+gFUhXw=IKw1Tp#mH|Ba<D*ZyN{CJApIMZ{>L8B
zR)1J~+EfnQYQ<bnC#M2$OkYOUz=BJ|-wRL;-KMKR8o({-YqB_u-w$3J2i{is#0%Pt
zzU{@p@L~x>2e$QL-dNV`><10$z}nDivXC}(mm~7}upcr=ZD=yq)AfJ{YYZS;##tfG
z!(8yDCk^#}FY97t4Oe6!Apq&HAT7X~dlIUl+q4R#0UTb-aH;M`EOXl82@S8+o}kfW
zhz@Mwg>5}uKd5N~3$Jg|knoCfKn|}H(v*c)40sR@8eUhW!QO}UZ$V>1@c!*?MQ8~0
zvKAogS%^o^^m9->-KH)eJ>XC)z@s4yd<iKyp;dc81Fpz}f#HQUL=&RFhT^vS;Pi>w
zUke6}kHNxjmlP!I<m{3AYdum(VMk>DRtG%%1P!mHQjqY1Bp1~6bf+bte(z;<K-N=;
zN00e&sGe?95s)5mcsbzFpaxz@2o5iAcW8Jyxic`l;D%_z5nkBV(<y@%Si{1rOA-=Z
z?`)C7D@GE3cwrf@?gtGxb%VyMTR}#`C(OX@FVIY_gh#g<2W*s6-J_c~8?>YvKFSH2
z5%TC2?KOrD`kn=iXa#t5{)P-JKJb9v&*stj{e>*ZF7P2F$6mOD*xl6<;DPhr(g!d6
zzyh+SAk+Ude{+4n2p(4a@M1Pd0z4or5gmKjqw_G5Ts>Is{)_VxpaL6tMK8EN0$LyF
z(Ru)M&=Bh#SMYEOXu<+~g)pdp?g2eN$ntxMpvQkU@Mvf2{}NV@|NNlSs##aS^ngZs
z!3PzwPK619^>@2VfKFl4h<*849C@@KWjqXZy-805s2K`PpR5wl^ockW1#`VgFY5wi
zJr~8ndO)irV8iAU&i?=Z`V@EysN1v}q!^qqmm^fd#{a^Q*Hi3tfu_g}E({DWDj}*-
z*He^z!LgoVD(HxJr1ca(#UKeX)*3N4cHqTnF-U?$Uw??Qo&q&|azMjgu=@0=7}&4K
z>rGrhok*1RCcUf;$a*$H^nk+$I+{P>CQ|r-#^oTjPdh>-EPSRRhtEuBX!uNUW?*;`
z2~mw2KA`dt>w1$?XK-?ZugAG53JD)uE9CH5Es8&U5d9zMdWl}vDd2%93)oK1y`TZ<
z)&rn>uMT_uXHoI#Oh60+*+YkcU`r)tfVbHavs9w*8mQ%jsq*`a6j4xC19kkft|2!d
zLHPl^{}j?+gS+<$Xv7TEdW0<T;3dkv;CTD>;>LAQ?;q0@A0U+;($bE5S8=&_iwMlU
zyRH)BUK3RJ#)^PjyTpaB1$etIG49pF>0TDNd%5tt_X=c9Y76LG0Yvz&6(+{L(D1Fe
zMr`=jUBMN;A;Pfmjk<ynzToz<14?_j2ecy}mcLnr;rSa@mbrjhe$7W5K=ZeU!9`jx
z>jGpw7lpujK=~WC2JHc82{yd^11W}-e+ZSZ{GEoJzjxY0^Y;dOQ27T@EeBq+lExp-
zss&mV)msZ*SC;qw|Nob%pmhL<g#o3Yb=bZ3kbDk0HVM=>{3r;?=b>hhe4fUy>AK;?
zVX&x%>juy~B4{196liq{e>m$g&<dj7+7qv5f%6YIyim)-7|`@3EWEA?g8fdYJS;%g
zvk;;O6kf0;cn1z3m51IS#o+KNN2r8_7fN~9X$K9j20I3Z7oHH+sO8}{T<h;rK`Wt=
z>Wh;CkZ=O66hf?5pDjRqc?e7Y;ByDy<>6WZNczXUge(}eb_%i=7*;L@93wU#lwHJ?
z4}1h*`5@@xKiK{&<nkNp-YcMU2@v`9FF(Y+R-h$s=;il*wDNn!QDWVB?gB1%F5!o{
zbJYcm{EF;e@cO)7RuxqD`jX;a4e%adV#-4~obLU?2XpVQ^TfEf1az<fB7Eoakr2Kf
zM~DsIyz{ui*9GogFZ}NP23`A84c=}KZV-ak{(R&G=W!{IZdn7+`f1R#D`+`{0DKZ1
zR4V%P+IE4?5$HAtwPh~9*bUZI3M&N5K!#d=FWL2C5kz7mNRx-P?NKXe9^PZc!0@6K
zELo!E)155f*==ul8@y;L6Li{A>w%JX&;M*bodp6uoe2`4)p(#)Kpu$lkG~}nR3L(G
z*WdwVTS3<wY5aPu(S{5RrNUK$FV68mf*!Oc3UpN%!U7J61^g|b?I=)7K>HDHz!&~;
zg33pDX9RYBFG~9Y7Jk;g@CG$F{5JBy!|#J7sBmV693O}geqO$yW0RrbR}Iz$4?i=I
zq2TaKf=Ix^uhkM7el?b$Q?0?0=;5ac^*GVtw}%@k{G<(_;n&BFEBsDCWXKA?V8~hM
z;Iq^iK&KNx>K1U&m2rcEPQs&GcB=(a&~d<4{dQM#`1INeg3bbfB)qFHoWOcY&w$fc
zx9MNd7JSR^C3{|ILL@eWC9G|IET94BY{9_rf(I;Fl8tSFpA^Vy@B+U(Y77i7CUb$h
zmM@F`Ll*cwKqRAspu@yK^GRW#Tbci}f!6os>VtwZ&h-H(;lh)XDJY>Lr(96|3#zW2
zxFFTgGCfFj4O*x7;Drbm$PfY72axLe_RFtmOZq?-f|b6^1FesP?H`4&=UWaMM?(r&
zSuSwEg4XlJfLax(>-l<F!Q*Za^><!;-~=fyy$lYxZqwBuvn;=t9DZ>fA^}^^_sa~r
zp6{a>1H%i@jYF*mN-@{-#dAWucvu&0J)a&YIDPTA;9SoK&2OHa-#k12`*f#qcv}7`
zaR=R`{=u{P$Oq5?B>MlkOSQolb2TR`Fo2dtv3Yc}{}13mx;p(P$Rg0q3Y?(f{pY3o
zJiF~)Gy8P@2i>gvVmBuP1H5QMbm2kmA;|oBcfEoKZo_M!hL?b7$b>q0{tl#{O752g
z8QppRg&;Qf!<_!|GY9er0C@a_O7_nLEeh+r|6(EsmhgkyU(CV406Lf%imB;;sQrT2
z?0@l_ouc^L1G@C6^Ztu1AeuJu7YQ=D^ZtuKc6{My%ucKLy9rvY0!sgE`0U@zh7o_z
z^luN!R#Kpv*Q5D>1nNb7wNSH5K{O=z5bIq*`agpQxnM?kaCkHyl<;Ui_`<{TeCc<O
z&I*nfRv@jN_g@%+XowNe@vGo~|NNzTpcAC<=fNMWpye|6Uwi=35EH=ZA7uY;k8YUd
z&K#KT-wV>xdH=;$5Dn3a>VD8VGQ9a8q_Ok<ixLnGF#w+biFLmPNH1tr4~T}C0d_x?
z^6yR1>Ml_Hv*3+?7L53(lKqiT`vbAr|H7JuqWt>@bU{t${TE+AG$icc;ZG&^&jc9_
zD*u@Ag<ml<t@5uV)P6y{_J3xgRs78a4S9n6&xFPQFRGa+ia#5u;U*xOHt}~8w8#&X
z{y}$}TB6ne@cL^rBdy{ObjJj!u@{Kfeq+!rBL7vd=rJ(-SG@)%Z-B{LVDb)_yay&9
zfXPQ-@(GxH1}0yC$yZ?V4VZifCO?45Phj#3nEVDNe}KtfVDb-`{0Al(^g(W50+TFY
zk_}99fJrVe$pa?&z@z|}6ateXU{VZBN`OfzFew8j<-nu@m{bCjDqvC#Olp8hEikDA
zCiTFi0hlxblO|x&3`|;pNh>gE119ajqyw0A0+TLa(hW>{fJrYf=>sPHz+`|v149M_
z=+HjU*`67aAQtF2`wUwU>l6b6Lq;HobpXUl1F^P&SfEqHUakSLW`M*Nfmj<rED$#X
zbO!uO(7i+%k3nLfRp1%FL98l}IuS+&hL;5(mOh9DGBm>*#EJolC4pE$AXW{C<pE+%
z1+nZvtd$@ZXkBQ=J`hU_Bz6tN0v$`6@d3mV1Br2hHtX_$SV|xkXmu^<mLOS0Z3YI%
z;Qy+LIt&aGGC)%@FQ5JY|370LNYxz>>nn(L3B<Am9dK|0#L5P-_JLRvK&&kw)<zI(
z6^O+G+7hw=#0mqkrh!<iK&&1R>lcXC1Y&uyGBCWX0I@niEKv6);{=G60utk6V_<k0
z1!4t)SOFkbFNg)Y_d4Swh-Cv3V`FDvcxeP;xqw)pi()gHKrA_s*nAL61jM=zVsU|3
zOdJdhFG1HkWT=8z-~RpopAiFMy#cW*K&(d~)&dag28eYK#5xCJ-3GCafLNe5_RC!$
zmH;OM!%NV06dCrQuv!KZn*dTb2gFhV$xZ^XY(cCJ5X%R|sspj2L9CL0|NsA=y@ru-
zAtOU4BLhPxBTpA6!)iv(qZ|we82wLiFf3#eYT;lw$|Q7yo#7>u&=PirUS^>mYz!Nj
zh0d@sTxITKyui-zm4$6B2V*xY+g1+7`K)YvIT(+yvK?V#yv544hmG+AE87+}#x^##
zHEfJ?*w~h^F+OEu`@qIHk)3T0JL6t<wiE1(AKBUdurto(U|Y(;c%6go7YE~84z?yv
z#%Y{v%QzY5a0)Si_kMt~$N&F~TJaqHYz*@mIl9;wRx@QxV`JFN+{-wHjo|<bPX`;r
z0~XdVtPD?CKuL*#;s0z`Mk|Kvj0_Cd8CmDEGQ4Eunaj%1%*3^tg<&oe=R6jMWlYBy
z85rI$vrS`R{KUMIk%3_~E8A{X#*M5<)-wG6&nUn!myv;CE+gw3MutU<@gTEB8M)K6
zZZa`WV7$SI<i^>KjAl|i)0r3p1v;4+6BU0mGHzfL>SSWP$;kSXk?|g*9|OZ0Ce|HH
z3>%ra7#Malv7TaLJivruA1Fs%W8~S+#PEo*kOAZd9tIHx1`!S;1{rOT{f3Nc3_%PG
zL4s?U85$VnmohURWt5-G%-GB%Kb4vBC==gWW`@g5WeBG+{GV;hXv7f3zz`*}j+LRC
zQGOXK;{`_fd8~|6ndGOjGG1qr?_*{B#+1asaD$m|9V^2#<|jyAWB5N?mr;2GBiC(a
zhT}|}H<=k8F>&r?X6R#P+se#%l9}xyGvj$?HH3x#|1&DLGqK)dW$0l7oi6{4S>Prs
zLmLa|2(hEA0@qm?uCh9SZu_0B$|%K<&%lt+`Hzv|HY3|NM#e5CwpmP!6VStj%OQF$
zBg1S)d1!hNT+hh0j+x;W6Yo-HhUZKPAYpJ=tzn$ac#?_XIFmd$>@?#!Ca{9iO*bnj
z-ArX=*v!n@$;z;US-72*;XX6#XBLJh%#dUQ4kr@^P%*cXk@Y$=!x2W&i_8oSOgxvF
z8P+iIU1er?&18C+nPELM+ev1|C(LYLm>FL&BVzFXe@3*p3S@L=uw-Dc<Xp(Y;K$L-
z!jQt#$ih&-)yT}y#mM=EiD5G1WJU&tWlU^Om>AD6vAtnpyu<_wrim<UGg%ndvas!C
zVcg7uu!7<LY*R)p1_K5L1J1LI3{k8b85wHW7BVvSGP12?WZcWhc9fCvCL`NRM#lS$
zNZJ_w&$eVVU~pt$aAaN2#Ng(~z%Yf8a|aW{S4Ot^OpGl|Y|EJ#k1(;lU}C(@#QcGY
z@g9>DJVO7^PGyW{h+trd;GfCLP|S9ng|UZ`^Dhg-LPn1JEDYNjd9JcB9A@OY$joq;
zk@E;M!!yPSj0_ClnAo;3GtOaV+r!Mbi1{=l1H)Suwy!LVovdsVSs9nIvTbE$T#cOJ
z{)5wl6$676>m(+I&x~wcOpNoG*w!&IE<sHXVE2IX0LVSLAosL^-7_2Po{eDl>;=2$
zI@moA7^jivo<K%Vh6DzN1kODy45b|NSQxq)dFHS%OlRbp!_2Uek+YwfVHaaNBLl-h
zCbs`fjL(_a+L;;Of@5$E3)>bJ#uF@TH&__YA!6|V>_o;$1``Gb6aFtO4E}7JSQs-n
zZ?G`ba_nMZn9RttfrVi{Bi9;chMkO@^O+eAG4?YuFkEC}>t|;C&crsInei_ZC?rp?
zuw7tbe8R%^hJ~?#m2CnmV;dqQ|IemXx-(;7Fyp++$PmN7laZkul;Wl`vTbB!Jj%#+
zmXYxxBilzt#^-414wQBr7#JKlm#{LFvOQv9>|^BYU}ad$$nlDWVJ9Qc0~Us(j9hn^
z8SXQ3o@Zuw!8ntVfuWI^Z9g;Pa%Q&U%#3TnY3BzETLUZO6jru5tc)92*$%KWZbSGO
zR4ytqN-(%HFu1ceFft^wRWSB2vdv*+oP^|>|Fc1AKqrOrH!(7#uvIbkF|y5LWSoMb
zhC1d*G6XU(1oC$=GUTu|F)~hLWLw6_I0s=Faqf|26k$+jU{L2zWN>1OWXxr&Wh_SI
zY={})@}QZKA(gF~v7eD`J|p8)gf$GH`cM~~<~ZtEZK4<$rZIAyXJnYiD8|6Bg^}YZ
zBf}m>9tMUJjQkH7*{_2%GB7aKGS!0a>jo99B1#Ml21=kXHf2<Tn!^k=XF13m9tMV0
zjI28uS=YlggY<#$2}O`TS4KUCG6se+j^m81btw!Cj~H2(Fflv_73XbC+_RV%CNl9b
zFwAA*-^Rqg29(!P9I!@_fuRFz4(ax1fc052sx!1PGBC6;a%^Q{?qfW}$iQ%ciS-T>
z!&Ojae4mN+9TV#lxWyocGcYjdfc0uJ$}wm#Flex@Vr1~->SScdW@}_*oW;nth>>vt
zBT}^ou74QR7#P%8-5Ink!1a$E1E{UMkMT0oX%2=tto(a97@o6ooaSJ7&sxL4@S06*
zJqJT0yEy~H4tB9+91Q0`%vKJuxf~3$If6mX`2U~L6zX)|CmalqnRp&?Ff_ApKH*?^
z!Xo;BgW&|L>OBsIJ8V&RI2azYi`?R1_|Couv<eYyzbV5UMh1pCj2y@L8I~~KU}9kS
z!^|;@pP_}NnU#T|o%1wfD?h_WK9KZpK9EE+e+|f5kY5BCPO~yFoMz?!z{+ro71Z|l
z&&XB7bAyqggE1SV=>LC4X@)}_3=D@jSQl|J9OnpOV3^GbVlLp60oCv5_2GX;8HO*c
z3=CgbS-UtGezJBkF)%c9fY==zGGO--l9y&!$i~31kd5^v8^ba-8wQ3)VCHi+P{ZK=
ze@2lgjwOr?NfrzYvlv0FMU3KLbJZB$axgHw<<S4b#W07nnt@?Dm)JWlhP7P$3=Dg?
z_#bjHoaXX|`pNnjBUcwELpKxGG7g5NOq>%q7}hae1{F#yJRKYidsv0OuroBWg)HJ^
zxXM<;z;KaW><v4^bM{-HPzSXEwlMOqU}887uG+z2^oyN=;TJo{ZVrYf4kHGJEgY=7
zIT&_wfZaShkkM@)GwViPhX3rWn|T>pI1Vu}FdX9oiJa!T!^ps}oQLBgFXMV1QwD}7
zJYs8j8UFAHtm0+p<CWUT%dm}?^CU0heqKav4fd}9LkBwpLkBzmT6Tu%?2xz<Pv+=g
zW|+;$(ag;7iBYDPnW2yA4b;uA7#Um`7+hpe3Na-5GBB)TlszuQaGKGHfuWa4_NWlU
zDkg6RhObPrhlLm>G5azw+-H_OD8%rGxr%||8;k6IA%@AU-V6-)SY`JLG5lr?WMDYM
zCc9gR;U!xN1H)N%*|S0nuh<<J7*=t}?i6A;&f(3#Fo{!fyAZ=>PG1Iw1}@pHLJV_3
zhfCh)lHDxC@COtZ$GK%U3Nbw4_GMt$$RoR6h~XR$sHVNjD|=dq;S;YL1H%qJ*|kCp
zSNXgd7#8r$t`=h0%kRs;&?TU_Qix%-Ks*COm!Rx&A%^9GCJYR>1!b2CG5in&C0ua3
z9%NQKqwGaSh9!*7&@h|A2x=C|zF}n8$QTF}o5;k#Fp)|2E)&B#CRebSay&;b3n&~r
zSQvgVx=mtX*vBN$#=>xqiS-jR!(*l;pdK$M9iX<`X;UX_GlVfPgmHdgWGE221S*T!
z4uLuzY^N9*A271LU}XHm$kxNe*aRxuKyeLD<1}g?XfyaTF!*!6WMs$`I?c#1osn%f
zBjZX&w!`2i=|e`wuZ(Q1OpL!ll?1{qp!R_e1A`C$a#n_7&Q4Z_4n~f*EDUoQd0w+H
ztYqYR&CGC^k@Fri!x_e<j0_APnb^)TGqy6b9b;yk$;@_*nQ=ZdDChlTVQXY%oX*NN
zpOtYdE89_4#@(px1CTwT3=E;5He&&(&DaTUGtL9I8MlDjj0eDN##`Vv<7064{YH8l
z)|8RFZ~&zbaQkW@3xhYPuaF4pE98Ru3hm&&!US+Dmc+h-HbV>pLk#CHMutlMJD{}A
zc7~C08zb8lM#fi+Y@Zkz+nCs<F){X_rVm9%aRze+26LfWh6J`!#!g1InT(A6sPP6;
z18S@D*E1xrl{0oTvdv~>oQSH1+^_(dO%3~H8ATbi7#OtpQy5&?Vi*h98W<VN;Eg9p
z+X>Vs;%{SQNN1~KoCL1bXCQe3qQ;Mb!H>V4ks*_<o^diG+hRt>nIx;RWi(+}#|SE0
zMSp=3Guvrq##@YBznB?jFv+#BFl=Muy3EXQfayLX1H*h4w)HHGOA*}$P<@Qnw*;kU
z`t*U^8EhFCY(ah4AW*u@0Hw<kP`c~`r_1T!J_^a{64amM{Km*o%725AVF4rC2}Z_E
zjBFPe8J{t-y<=o-Vq%-b#MptFE@_oV$Z*SlMqY4nr#hXHp~N0ktTI5-3Nif)`l+E#
zAB5O`6MlbXXO?Fc=jW8A7BL{03aJ$(sd>ej`FRSt`6;D2sSJjC#(IVf@$qG;MIhn$
zc!v0Rka{G6;*z4&y!4U`hOoq<%*3ReRECh$qFe^oih`olVvr^V=ls0llA^@Syb^|B
zh?Op-1v#0?i6yB>QlWXpi6xoEX<#!@#Iy7A%kz*#;JRH?GE3mLhUS6Qp@=yprUa!H
zmSz^ErUb*B335Kf=XywHDrhM%C}=A%DA?L67%(W<fJr+rS)%~+qJpggLt;)&esW?-
zevwUxos|`om#R^mS(O@JqL8Plpq{6$kXn(PnwnCqkegVMnOmBxP+VG2kY7}inxX(w
z$B>E$8~4-_g_4X^1%!}7eo|Iya*0A(ei2wE86J9i4EY5NFbzeiIf*5i`FV*s3i$=8
zMIaYp)dEuw_X9R_ZEYEBY#Hoq8T|cg{H@~S^HR&><3UlFnhbI@)J6Ge3MG{VsR{|I
z#R+-}?)fD!ry@%zWabr@q$Z~5F%*{+F<^6LZfS9eLQ<-Nx~;9cj)J-kh|DiiP`3jy
z^72a*)K!bs8H%9s1~Mi;FP$MbH6^n&m!Uj0F&iY5nU{`;7PyHpO(1iNON#RI(m^bU
zR&^Z(kS!ns)O8f#_N3+HCzj|aWagEC<#RxGAte!{WL%nuh(>Ub=cRzdL!(#|9P+Bg
zRtoB>dWHrD#p())c`0xXgAIeCEdyxfa%xd-F+2uKN(*vQ6+mGJ5(XQAniBLFz;U0L
zR|3}q6TqsvEHS4v6|NM_!>SHBj=|nnNXabA1Z7o)q)LUV)S`T(GC>cTWs$-Jp8t^Y
z50W&ljEtfLoL8~yhZ~QQl`(bWlE#+l6H`)<EOSgrQAkt(hb6dJQYgs>XEkWCz)+N$
zn_q@pHU)tNQ1zr0<>#X4%1A6mvcoweH91?MJR`LvBNdX7A*O?@ElJGGD@HL*k0CK7
z1;s;PH|8m%reu~VltD{Y?CwO-26HL8PHe74(MW`Q(ah9iC@xJ;Pc1G91E;%S2uC4N
zAtkjKRN^WiQeYy6*3yEM#FA84;a$uS3g&~u6RfQmoG~&|6`-L83xZ-jhN9Hs)Dngu
z5D8WfRRN8|%)HE!%)}gp)ZBuSN`>N*#G(>VS&^Kdmy!u8FZCEwN^=VsTuO5b6cQDR
zQbDy;YF-JrEJd?3Gp{7IC@(Rm7!>ZsB`Nu(B@o@kq-#$El~$Q~>3R$y8L6N&0Id-f
zG7^gw5_5`D6H_V`l2TLi6cSTXQd2-Sf(iqO-!L>KC+2}mu%uK4Xr-B&0#O7hsx!+{
z6*7wzO7oKPOY>4da%qW~IS^yhGV@Xta#9n^K*3U!UyjW|pwbbyWBmQWdQg)lasF^r
zfECf8@Bp_P6p|D3AoeMg7N@2tWacT9WMmeD!l)#*q6FeCOuHajaD|skDY#xN$;{2H
zN>zY(UXQ^aT(f4Dr84Nk07D3<$&y$K6JhYoOG`~G&IGk%U}|)A;cc5@22iF^K%`rQ
z8W0PqsZ$Iy5iFj`P{vTqkjjw4pil#9WGFzI7zzvupf(0b#8$yT0opv$gI2%_C5hRo
z#R_@(3W-JOrManjCB?`EGpt4jM@B(WYD#KaW?pKF0;q<Ahb|=ekYpj1A#Qm{wTW9E
zBmVRll8aIkOHvt}!IT0xr9)yCT<Cy{+02sC)I3l@mQ$Kq46YpW3qZLsF-M_1H8VY<
zM32EA)b>F3AGjF_%lAmt8l;s8uh~jci*i9VWolYlW^!gKN@WQbLsJcF6oHExG&!V-
z1{Tc>d5O7*iWS6xl@u`N=`j?imiU2G1w$Kj*i|Lx=YbM6+&54@tRa<HT%4brnFwlg
zm1mY@fQvU2E1aSFVYZN@ALi@i(xM`GY+(AN#vjoH1ox$o2=G;b5dMe#e!(Ti@$rU+
z7D<W4naT0RB}JKe>7Z1ZnCqGB7aVVBn4FPV6c1|pmlS&@ySn-X$6G>M%=twi0asTB
zm?5Ufh9s4y5pM#-ISd~l9X|%pO(0+)@E&Ur3pTz5Vlyy&*bk-ct$GX|^;Np=gVLad
zHDIa#VDP@)&jgef3@zae5=8ax$^elxGRD!`fhjPw^n}(Tkam=7QBi&oT+$rcX~AN!
zA>39_i33ZCaGjP|S~&<wsIjOz!G2?K=m$;rGc>g5IfB9kG&jHvI@5{)GR5q}z`y_+
zGrbtW8pz1N$f(G`1X9K*$juU{$iNgR$j!o-$jBUcF@hCDGcqzT1{yRnGC1%vFw9|Q
zU`SzLU?>X$X@T$)_!$^d!WbC}L>U-F!WkI^BpDc7!WkJJNHZ|>gflWYC^IlD2xny2
zpu)hg10=7;!0;!Wk)c4Hfk7aGk)c7Gfk7dHks&~rfgvP<kwL+Lfnf@WHez5n62Zve
zV9LPoCW4Vcz=DB6B$AQgf&~MEN+cu00xJdvn@C0m2U`XP9}wS>fuSank)gqrfniT1
zBSV4*1H+j}MurVu3=9fUj0_vR85km>7#R$F85pKSF*1DcXJGgP;s-J?s6;a|d<bS>
z=!s@z2nb_fcmU!@Ffh2pFfxG7ZI}U~V;C4N#4s`#q%$yB#4<8GNM~S(iDhJ%kio#v
z5X;EmkjcQXB$kmOA%}tCL@XnNKrRErn^;DM1Gx+g0&$EC0eK7z262oG4iyXxA#sch
z29*p9bK)2oK2$L<TmkWG7#L*Y85us*GBAY1Gcr7AU|^UK&&aT$k%3`PJR^fb69dDU
zct(Z;Z43+o35*O3y$lRK35*O1vl$pV5*QgA7BMi)0nv*Y7}kK~S1>S~Nnm7XSi!(>
z2gF~?!0;!5k-=ai1A|N=BZI>Z1_ql%Mur7D85lwm85s`jXJBYZWMp7C%D`|Sk&!{*
zI0M6lL`H^&iwq1O5*Zm1ZZI&oBr!59xW~W{0iqu;Fq9-QGB~_qU`R=3WH5Nmz)+FQ
z$k6bHfuSdvks;s{1H%Fk|0e^(iDX8Ggg*=nH<B3{HvD5?&`DurxbUBWAti;8L4k>p
zVMhuhg8?%m!-o_`1_m}phKN)~h65ao3<pvf87^=#GW<woWKiH?WbjF2WM~j!WN1la
zWH1nAWLS~L$nZdvk>N}lBf|$VMh2F2Mur9Aj0_s-j0_W`7#V!h85tC$85wfY85u4}
zGcrs_XJlxQVPx2n&d4C3!pLwVospqHi;+PhgOTBb79)d31|!1;T~M4dGB6l0GCav(
zWKb|<WRS^ZWGFCZWGKjFWN5HtWLN<@6VaBDVNWI_!v<SMh6kC93<-9O3@llU3<CCy
z3>qNXiIKr4i;>}g6C*=G79&G}Gb6*4EJg+e7e<C1S&R&z`CcA|TMQ+j3ym2Vm_h6M
zF0e3gKVV^CW?*IDc)-fQY{16AaDkJ7VF3>V%LZNsP6a^*#sFakX3$&*D8F40V_-0l
zU|?;KU|`%J!NBoAnt|CthJi~!j)C=o0t3SXB?bo29I1gi1M>t82Br%d4D1s$85k0@
z8CWjpFmMLwGB6kDGB9q?WngB|V_;A)WME1#Vqi!xVPI-7WnkW5%D^OG#=z)c#=ttk
zjDhih83U7oB?Ci&H3NHr4FkgmTLy*=4h-xc92pou=Q{|vGcauM1Rbl#z|P>yz;MBz
zfoVbr19Lzq1Jj3a29^a83@ibW3@i*$3@jU>7?>1d8CWjFGO#qnF|Z26Gq4_rXJGn}
z#K3qUnSuF1G6Um-6b8-(sSIoknGEb7G8xzpWHB&p$Yx;uki)=!AeVt@LmmUGK>-6}
zLm>n6f<gw?f+7a42Sp4F0p$#g4do0B4mAwS6KWY457aU+9B5@=D`;onc+kzj@S%@^
z(V(A!*`c3-BVi%~C&Oe0R)#4I3<qW~unNp%V04(tz)&!kfpNoJ2Id2E85lk+WMF7m
z%D};}l7ZpCN(M%URSe7ts~8v<HZU+2Y+zt+*ucOLu$6&n!&V0716vu`6}B@lP1w%B
zykI*6<ALoA%onyZFc|D%;P|kgfl1*Y1KWXv3`_=x7?>RnF)%2cWMKGkhJh{NJOe|*
zMFxfow-^{1ZZj}^xXr-8aF>C};VuKifqM+h1@{>k6CN=zH#}lsJn)EtS>Q1P!-A&_
zOb?zha0I+yU@CaYz_{Th1Czij21bWh46GAgF)$u@#lWQSmVu$*Jp;>x4-5<sJ~6O-
z_{_j?;2Q%&!%qg53BMQ^9{gcoVfe?ubbyJGnSq&+$$^!T<pV1t`vNvbmJRHTED0Qp
z>;jyOOaa`CtPi*u*(dNYvN-TEvV7oWWLhA|$XFo6$UH%akx4+9k$HkJBhv*DMiv2a
zMkWIZM#cgOMkWJkM%D&tM#c@&j0^>Gj0^`97+D@DGBOybFtP-wGBOEhF)}-7F>*0z
zGcq@5Gcp~}Wn?MPV`Nd#XJomc&&bkXz{qsLh>@kin32W6gpuWe2_wq{Q${uc3r5ZY
zOGZWqD@L{hR*Z}c){JZoHjIo1Y#7-N*fKIY*fX*@I509cI54s?I59FhI5Dy@Fff6t
zK1e-DMbZJ*;y3~^bOPwSM-X!-h+tq~xB#IiK;<v=K=|lnM+>B0y#ZQ2!N9;^0Gc;t
zU|^7d@@K$km^hR+fQrM+JJ1X<=L1x{0IJRd%HIH^Vd7A_04na#3eoQYH3w!cA^Jlw
zMBgtc%@P9P^Fe6|D6Isgb)d8vly-p9UQjv&O2<Lz3@BX$rE8#c8<d^^rDs9uB~W@D
zl->cQ4?*cOP+B7l;?5UP{wFB?2THSrLgWRYv=o$9fzo<V+5$>DL1`Z-9R{TnpmY|L
zE`ic@P`U$3PlD2Op!6~*y#Y$^g3?Ez^f@Sf14=)F(r=(Ndb}Ay{re3j59M<~X%Q$b
z2c>PGv>TKTfYMP=8r~3xTBHF@KOaDiPX-2t23d#%Xe~U5^#R=e{{J7QPCyPM%D^xI
zN;g2|VKhvg0%#Ej0|UbWD4hUJpBm77vj9q;fYKkJv;@>W6;OTvl->Z<-vOo3{Q+?m
zCW-D3Ocf9jnEPQgL==-G)?S!7FdEY&hzQJm5FT!F0yLaqG%l3_Pzi-{NIrma;nV?W
z_$GiFF$@e03!pSy0h9r1lYp2Bpmqs}fdfxKlLxnBkko?u6d*3lU8!j9dYc1r=LabL
z3QBwCLBs>lXm2Rr4@$$#WvGCt2iXO}FmYKlagZDc!^&w``EM`}B+9^00Hq;31_oGp
z4XXz>fHs&gFfe?8(lC4MaM%N@2N<CBivyHyfYLDi(Kz(O>K9nORRFEOHb7~Z{xUTE
zF!8Bq;xK<nK<g1$J(q9{B69&sLwJaK?E|zvd;pU_4p9NKXA=&4W<c$M)vF8_A^HlS
zw08+4-h837Ka>uH(!o$V7D}f;>1-%XY&s!U9mI8z{x^(`kG_)yasP8D4O8m?TEEV~
z!0@00A`cVCMZ+39pguY_HK6@PAaMr~5Xk^q(F<lMh=M5wV+h6I0OdPD`JlcBhzTBO
z0F}p}%`FTJpcU^RVel9Lh@lT6kgyjL8^nYb<KVFZ5FZ_1%ZB(9UYxkFK$Jl3i-hKD
z1#O5hRF)wHDn1EH8=%Qgg^H^|%}+p+KMIwHn2MW(*$=Y^W*$rjEPO$ChJwW0K?EqD
z+dvqw@D%{<L1SQGfQ2t;Oar9C8ALEJFeE?&8cHz~bVI_$5h}j`${-Zp>@f9EgH@n>
z&~ii&6Ix9(B)EWh3=GifnBf4FuL==n_yFZYxp3-dF2r5v@s-d85f6lxuNRUbT43=N
z4i#^L(htz&yP@J6p!5ec`E5{fh^e?qnEfz&&{e|X>q9X_1{U5EdLZtH1~Y>~2}GVy
zctcbn$lp+RE`SMvcGNL2FbF`+Kb;S;2Q)?mQo;cu7#J9CLdAbVY0%gaND3y;RRGba
z52YDE+YcES7+~_YQ1M(S4H|O->4)fIV5o(PFND$tkY&km{ac{occC;xEl4-Q{I^hX
z#X^XQpfN0v3J8yZK@Tb(1EmY<L9z@Cu=vk`icg2q2cQiinEYI*_;n}^8XE(d0F%E1
z6&Edn*a#XkLzb6@ii5=PVOabtyo0C$4P}5>6Z#<L!qP(mXrBlJ1855^NE(jO%Qv_L
zlraTbzrgA@SO^F}&7T6zKL$|yq1qT$K=r}+y-;-lXzGrkscVC(D?n5C3{71vRNVwL
zb!?>&f5Yr0mDYgj-vBiiW|}@!oRE32_&D$pq7qhqGJJyYVdbX+ln*OEL1T{~70_ai
z;lURWkAVRee;YtMrjY6@g>MjfLirh1eopufQ4cFWKS24g@-yHEL>^XtZh-RP<tLO6
zQH3B&py7gEelCEfw-%^8gAgPnVd=jYD!v9v3!usGfQrM)PX#pjTTpp~OR%$G_QUK!
zR|Si&3yKi;!Qv}G3BrfPR{@j{i?0PxJ}ka2K>4uvN|*%E4=aB`W49o?iH)xSHHdy#
zd~Ja8VeuuP4v~k&R{@j{i?0n(KFCgF{0$mz=<yXW5n>)!8H5h7gwU|~;)jasLTS*L
zGRRz5d2I?64~EhXR;c=;q2d#ubOKZ!rhhh692VaTpz<*LFF@r%cH+Y@_ru(QPwlAO
zXb6mkz-S1JhQMeDjE2By2#kinXb6mkz-S1JhQMeDjE2By2#kinXb6mkz-S1JhQMeD
zjE2By2#kinXb6mkz-S1JhQMeDjE2By2#kinXb6mkz-S1JhQMeDjE2By2#kinXb6mk
zz-S22G6Vz_J;I$L9HU$ljGdyKf}Mh$Vht8B6$k`71vxjca6~v32owl}IEFZaNC*az
zAbx_FTaaUrYk@=qD@TC_NL{ca2o@*=J2kLz6lgTCa3G5YITvsga1<C62!K>!;sOnX
zd;v$0BOF@<ITmPOHxW}5WNv{%kex$(K&5L%a(qZ-K`Qt_B8I?1jtMYP@Ns?+VYpaS
zYEixm^pHp-DL2sJnP6iWz)B$pti`((rKU18Y>-&+gBe-a-8tSRHHV>L#y9B3V#Wn8
zm=^5dTCf754CI`6_td=9qReDR&@rnF4IS@cN)Ir-U|R5ldBF*0i1PS=%6PZLoZ{5r
zlA_Y&5{8Bwpli!Osu?FZ#Dir0^7CMlaFG(v+=84`&{1mO6JbFbp>iQbrEt}P4)O6(
ze!(S%MmgYv%;Hl)r#875x|T6CSb)@nB*8~t8ADF3%EWi-m8**ZNaF%f2!o9!K_}Qs
z*nar`V4tDeolk__0oWW(KqEe@O`zT;U<BCTJ3zOb!Q%z)dLoU$=L*D;VX29^IFAf-
zH7TylO9sWj1tyeWAX2+&W^Phxs%wz9UvRt`=<MH|R9A+E8z3KnFlw9-Yi4O$PJTJa
zO{mF<NbP3DsU=CJX|4guISdU4K&}8`vRq<TQJkJyk_RzLft>I$t0*og1)1Ota*YHz
zMpzbS<d^3r=2b$X3FH|NM)8noGU)7K&tzAl%oJCKh96%+D*pe6=7|tb*g0}U<w#eC
zh7OJ`V5ef}#Gj^J85$JOH4>MUL3YBk!T~0Q1qUGhLpXrQ*mq@Um;lp?UPfUFnBWpi
z&~en@;}#hjE^uwybAZWW(;kTD!~6^l;^UJmDiV`2%M6VS;^T`m<CAj|i;Lqy!5yEO
zmzEC-?O(9aP7qiyfpHhA8JLdf_zYJMsu<#<LNf9U&EjEkL|lMDip>>l5SNl;0Jg#-
zq|6{b&&UMs7NXJ%#3>aJmyl%u2~NSE03l9+6zDkoLcjoGoI*+rJ%Fcbtl>kXQP^CS
zk`qwI(D339EHN$k0cAmReO`Gk1DaTTdPPNiL26Mk=w$TFlFIlp1BQkja1$m7ImE}u
z7ndX^XU8XJWXGqMCKjcDlW08TfZqa!2@WZhIVthQC8bI6Ntt;msYMJEAgxSLt02S^
za`2-ewxb&Z3uiDcxWNMn2AD$ZCp9)KfM<~f7kJThz>j`JJ{PiK#V4rm85b<zM$>`y
zkjaJ{U*I|#&|QV~h{%Q?2v_|;b(ImW11TFC5bo+g(}VkT%!UsLT`S<efgUJ|C<rt2
z(s5So*iI8|IPn+eWmpJ7&7|m%(uNLr3Oc|9sU<@^F^jzg@M02Tc`%-|($E2o5tt$z
z=>(e7;i|BwjR$DnhAYROFrXzFTosPQ0Ig-9%5bNEA7~{2Oaq?ipMaqS8c!6&eFrq^
zp`}AeFs#0zEbh_DH%Oth;WyMWMzjJb3cXNbU?{x61nzP<7Ny4rBo-B?hJa4~XPCfH
zcmtb+V_u4PYGrwTQA#mL%>#%Um(1jn_~6W{RFK#Uh?r|pQM_AzQEp-hNbmzf&@Z*z
zwIVqcQs+-#DExtt@=pdI|C<8R(ZCFL7s#04)DnO2>DVCg4ixd=641H6An^$haY(Pv
zttda&CBGDOQ1%3d!Wj@LpZvV^c*kOd_yUNyFZenUm(;X)&%8{qYgZsk#5<;>Agtd2
zk%9=syF15+=7kg`CTD}3umeRdpa^uS$OMMM0}$ch)WXu#yyVn)=lr~6NT{4ZlYxi*
z1&9QgAMcl14l?})vRDAr$_G%v%7WB*kf(zSQj<Z7Um%G?d#zAoJ|M|>76+Fmf!b0a
zEk7U<p?R6f`6;RKj>VxNZWbVk1{QEkz$C!dz=OSm1ypN==9OpWrNjrN7MJFN6Tt))
zNI%sZnsbaGoifNV%)}m_Y@VE-TaZ|k>YAGy;GLWca?1=<b3u*5vee{~{G#}z#NyQW
z<ow*+{5(&eBtutMkX%VdQGR(ma!1*<3}nm#G-C|oLHgn6L=$b!3RH8TT`<ei;?(%G
z#N^Zx*C4-OgLrVSU;;zo22|A+nfb*aYg5hAQ%iF4LH&yf423(Oih@fF4Ri9-Gm{}5
zd}kxq0B>;A9)K$V_tnV?S+m6A;>`3sn2{%_VPsJ%=>8bjT(BcAP{BmYg3{uQ_@u<-
zY*(<EH>hByv!QE{cd%(ZC=ooMf_aI-2=iW`ng?yX<8Sa97nGJjeD{G2-KJ?drNtSL
zB>jU7{bt3fc_l@apg;*W28T%lD<t1Svn^46fSLjxf||fk*g=L7ImNEvLVp5zdMiQE
zIRjNMkxhFeNa!s<)r_d<@s8d&Lv^npOScPD_XZT*&@u!TnmbS=;YEhAaeRDnYDs)j
zVoFkGNqk~nN<8Q;gUpoFqWGdzaIpvR*a7VNO+Z(XxCVd=!xPw*m?r0>CKkD7fx`9z
ziV{mmVV;>^jJ+gxh576TTpOaB50iTUkqa)d1YMGlnU`3S3U<v8R#1f!p9H?@X97cE
z0~;ts#wS-K#zT+iPmM>68GzCoLP1h$dS+gHa$-qx2FTSD5HhKt0D_6nK!~RmrJ@<P
z03ituMvy@(*uZW=bj{KdGjl)^8$c4dskzAo;3T*M$_3p41FHBz+~Sg=oYXv6H;sXT
zF`j{yfhnL8)YbrBR|dI8g+VSpib0u~;eZ?i1CyVjK{nzJG>Z_1b4(1Zh793mkaPMO
zt}t3Kd}K5MUqZ2f$%x?~Hv@|SLl2`VC{_`nQ|wyC@S4$*;R_eG4&i@p1|~!wj$s8O
z(sfe%7#WyMgFyGbxRx>OWHe-Wz`@M0K!X8E^gN>>LpLWg!-Ps?(Wi`t4BI$a7#f~2
zGO#c#c*e-Uzyult24BL~$ppJ9fMGiml2Nmm3>hARG~HoFHftl3Awv%r3&VoPD8eV0
z3>mg_u`p~n%)-FH6zs=vi^<cE;Vvr!6SUA`c*g|39D(6GGXoPv&Fk09h=PFB-rh%M
z7M2C?SQ%JYp1xycU|=$2Xk;;BXk=wzvSgUcf-ORhvNEt>?Lqx!!P-;W&x-Y)g^R3M
zdra3^Sy&oAfLwyR=d+U)S4a9ZD+|lq4<O&{1CzJG<To%m^&?1p(?^imuqgP*3W<VW
zY!pVpJ2vv8;0qfI%Ze^G1{RhB-Jk=s4H;V4jTl-uKvA%O9a|Kf;J^_D|Jboc!66Q;
zQE-I=YZTn#U}2fj19A!ODA>b+D+<nWu&`|C0r_S|FNnMWCTH}4*c-s)jDC>Wrl6@R
zX#WJ%9dKo6<upKuxiUNg<*CWom2N;)`htsv;lecRN^heoeaFSZ@L>jarEOd&4w%lv
z!th}ZcBLCpmCog1VVD5A%9{z(TX#^EF6LojIIx6`fra70G7$L!Onz9##=yXgHCyfG
z#u`iAJXqs&A`c7050GI@xFhc;H?GKQ<zZoHSPpVl2bi1yCTD=j1z>UonA`v+cPwXv
zjOsBk9FSAkz$u{cfKkC<0s95k2}~0hConcJPGDTXxPZ~Yp@G>TAmIbk1EvQ|0R|h`
z1Pl}|FkfJ8U|GPnf$0I0!vV$%%paH<7!@283>L5*U}|7VNJub9FbFW%z_fws0!sr^
z1M>ma155{4AFwQ71@SMif@DBgKwtsm1*Qv38yF$*0@DI8djjJI#t)1iKyr)<7Z?o=
zFm7N4iCkd(z<7WOL^d#AU{biiSnz?v-~i_ZCW8in4a^@nCa^gOEMQr{+`x2zGr(X2
zhl9WdjtT4&m=7>+U^>9);GnR8+rUBK0%wB21EvB21%U?~Ak`Nb6BaNQ2t44u!2W@y
zf#rjc!3V|(Ob!jq3IYlW3JM=sL0+4{dVuKzQ$j+51Hx+ym@cqPV7kD1fOP`X1(pR&
zATNQifPlgRm`}j|0r5b70{Ife{=jsBQK5lR!NA}E(*aO;IV6De1sEJ)1(^fFAU+5i
z8~}wZNE1ji7(<N)u@5jEV0yrGfw6(np@9(u!IT3Sz`0;a1_lNpF$M-cMg|7hdAD|A
z3=DFN4GaR%^KHYR^04!6XG8h0^KL&w`LOeD&BP(*!Opv_fbv0i+<?;7At)bo*A0mO
zR~%v=?A&t?2?!r{{`q7mA9mjP6DS{c?zyQXL_O@B^A0E<cHa4IC?9stwz3pNJ?y;g
zY$zXg&h~jJi2GsZZJSC%)Wgo#ZiVt;=WAbu@?qy|tH?mqgYNDD`L7kq7f=K77#M!a
zK+Ff-<pUCtmxb^TsDpS63|6ub{U4zGR;WDeyxy}=KJ0v6UO9+<*m=CcauEFu(DQkh
zK>47%f<QK0gYrRl27&l2@(}e3Mj##ogEf>7x<d#gvINR6Faz-z7(PJx7tA62E(M5w
z&|O0yk)u#P=*}S!|0|Rax_b!3S5bnhho0wqR0-mK*m=KP$`JkssC*KX4?FL7DU=U8
z@Ao;B4?FKyLj|HAcD`>Bj1N61aUGNoJHPi8ln*<<S4|b7A9j9kJd_VRzZaH10$f49
zLBuEM4keHXEIuzl<!7rx{LA15l4W4{1LYgIL-=uO5P8tuOCXVLP(J7mCJ_GzjPDKN
zF)&!DL)3%rWCDpyhw?#pGlBRIpnQWs5RZYuSp%XTbXOBdWG0jky0Zzy{{iKHhyd{z
z7@{>H>Oprnfkalo_%R?J1H(TkUmzaBkJo~#Pk`_bLiq)W5WbQ&M1DaMgx{hK318TG
z#>b(2*!ji0IuLo-dBp)b5d9NCmuN9CFl>eLAEZLelh%dEgYK9DiBv%Opu46({0C4z
z=*}q+-%byr9(4B<h`$`l|BwmdF);AxL*=s}{30kHbVn6P<QkL@x~mGrw=;mK2i;i(
z;xC5sL3dYy`2V4NhI|l@fg!~ZqW%Mve+tTf0KI@f-v}aaPy|sw)d(6M(DRiqLiq~N
z^OVJnA?hDM`DRf5gmQ@f0Aq-L*!jq_p?uhR$S<LM*!jmMCJ^<o^Nt&#eAxNM7omLE
zdB$?4Q1#IBjWeKp*m=ckpnTZ*#LuAo2hj71<;@`aVdoD=nnC;ny1NVHgK1Dc?0$fq
zQ2v27kSqhkZz#W^8^X6Whv<jh6OaPsgYNzUsaOW%PX+N97%oEj8>T_{LKYDHpgY1q
zB2G{~>|TOYC?9q|K@XJQ0KKnZEtJ0idSLZ+C?9m!7|1+7ONjXni$Od@`N6OP!iSX~
z0Z{%?sCv+yWFQs#RuKKL`wzmPeAqn*jZi+rT99f6hA&V)=ngZGNVzpc|A8$a9s|RB
zC_i8ugwJaOi4TG85PpUYL_O&4GmuCxln=W{0aR{-+6xXlL9z@Cm!a~odlh~``Jg+~
zKq{<kq2@vFO^Al_3!wZOC?9sO!a^t?bk`b4|2-%lc3%Rs9mKp3(0dWopnTAsY#{Zk
zp?vtg3Q#`meubY<{sZWJ3NrQ(^BAD_DmX*=pu62b=AD4@3r>T03=B>Vknoyt7Q&zJ
z0MT!79>Tu|<-_h>V047YCqVBvnhWK_?qfIr<-_h}cn;-*?vw+WsOSXMe*?s0VDN$R
z4emhr`A|OS&N-0CLm2-7h{wPHtG@(ZLHKUY5c5EH(t$)4L;0Y)=|KDl7pVLj5RZXj
z2b2%GvkoL8;|lRV?ELR=C?9qn_Y^1}cJB6NC?9sdwvZb{KkOXsU??AU{`G7qA9kMg
zWhftZp0%7iL_h32>kud(cAj+yln*=4`Y@CaJHMLE1EL>xKD9NJ4?CZ_0Lq7*PrVe%
zhn-J-9m@XzJ)asDz5zc$p@YcZ4NyKTe_w#|c|0NE%kT@LJ`Kt@_zmG7gz`am0fIz?
zyddg9cLIX=zED2sZa@%!C6o`kBM`)Y59J#`?#yGb^M>dL-5Cgyp9STE?hXX;A3*t_
zI|M;|eIJN=&|QKceg>59zzaEF`4E&}0O~|AFfhpaLewwdhsc*h`4<Er{7b%&@P?ho
z&Fu%_!_MD!hw@?PZP)ri%yWRAzkM6ZPY{NfXXOu(2i<W9a^D0fA9U9ti2nr22i<uH
z;%fy!)PwFm1o0c8e9#?;ApSKdKR^OvzDXcNeSsu|KN-pg-IWMZ{~gK)-I)mDdj~<(
zgYHfQ@i##EpgR;ne4${7yn!slyaFgcKn}t`59JrgL-@KO5cLlfApD*XNO-}{3qJ$p
z!_Etr4~59X&I^x)@?qzN&w}z{=Y`*d@&!~N<|~Fl^ux~oj)wAK=YLOy@?qzHpNH~c
z=YO+>L-fPW|8@w6xL-g6V*WfR-$4_?hxdoHAbeQ<H~{7Ug{lYLH3<s8pa_V0pgSi)
z{H0Jn=<Z1n{}GfAx`PtLH;sg-2i-*p;uk^r6HFlXU4Zfrm_qpaQ4sYX%pm+;C||(>
z!haJ5@gMAba?NN6A9fyj5tI)*e|!^^p8!2i{2!DLJ73%;24dd<sQf%A|A95cKE_yx
zJcBKSp9$rI?$8AJ=P;BHx=Rzp*NKCu2i>U&;<rQj2kasGUqbmG93Xt3c&K_O2!Ab<
z54wXBq@O(jA`iNY6T~lo@<DfUg7~MQ{0lA+{kn+|^#X1X{sbsLz#YQ>1m$n=fbc_;
zpyB5U;ZKJ08N49;i%`A;lrNkNQGWo+hv|3lhNy?>KLF*!^fUNC<YD?7pnT61h<OaY
z5cysx-vP?M2IV(E`5~zg^#`E**-$=%A4LCsDBl6fhua6`!`#c@4^a<uZv&JMbMFBt
zAEw_S0HPkI{{WN^)9(-nk%#GTfbwDb8KCzMXQweR@UcMd9|px=EtC(shZw}~hw?%9
z5rg;(p?uK2#324=C?9k`F^GQ*$_L$34C3E}@<I0%gZOWte9*ncApU<SA9Q~)h%b~5
zaS!MoV-Q~z$_L$N4B}fr`Jj7^L40p0A9TMlh#v>#gYG#7@e84R(0#`sehZWjy7w5w
zhxr$D|1pRU^IuIQB>bjB)r0Os2Fb(h1Ko=Z;;(|rgYHKL@nQCX?nwsm_d?}C_a%e)
z7odF5y~!Z{V<;bVe=>;w4ax`IqYUD6W<bINbe}SaF9YR+?o|fy^`U&w{mLM|6O<3S
zXBor~h4Ml7Era+OP(JA1We~p_$_L%Q4C41e`Jj84LHvbKKIlGX5Pu7l54x8b#6J$@
zgYIVr@oz!-pnIA@{I^g(=)PtU|38!uy0;m`7tDnC4|IPsh_3?WgYIz#@y(%p(0$Gz
zz891ay4M-RkA?C<_dA351yDZdo@WprmOjDvJu@&cz|tS+-e-_}GgLk3{$~&$mfk@3
zK!f<O^aj2Unt_1<9{-^Gpcxn#ra|?C?}uh!V1T7B&^^&0{VSpJp!=dh{5?=U=-y}$
z9~S?h`=dd8Sp0$Sk!E0EI1g11x=$J;4~s9*z0x55Bd9#+erXW@E0hnqXBx!k$by6~
z=)P$XUmD5>-8&89>p}UT`=>#CM<^e34>gD%0_B75qXzL|`2}<@HHZ((AE5iGL3~(x
z2i;Q*;=|HA=)P(YALd`sz11K-EWLy7uLkj9=^cEJHRygIXnFzNXAP2vg%9XnYY;yj
z>ORo@)*yZrln=V+8pMZ%7wEog5FeJlLHAyR_`Oi|p!=^u`~^@x=pJkkAC~?>_hEzh
zu=EYO7aPQfg+J(iY!Dw7{-ArZLHx~7^Fa4ygZQxW1$1vVhz~0tLHB2a_^|W_x<?zt
zho!d(8IbY_R^EW_)dtDK(i`Z0Z4e)p-az+kgZQxcSOL0k8!0^ZK>4un1l_+4QV$D1
z&^_EBJ}mu#?&AjWk3szdx|bWozX|1o?&k*a-$41Gd%8jV|4=^YzHSg-D4T&nj=O;Y
zb}w!Wln=XacNvr~0KH$7CkLV)cCT+Clz#&%e;vw)-M1^33sDcdr`G|>huzDY2Ia%<
z@9lu{VfXy5fbu<{_amNx@?rP?zJu~NK;;GU7#R3?7#Iqm_X+Aj`LO#1L!f*H=zV+L
zQ2qfZe?OG(0KMN$C?8@T?7lZoD8HZ^692VO{s$=k0+cTRz5k540HPmu-<mO$54#^L
z1ImZp$2Akmhuz0@8p?;=m-Gk9hu!z2RR}TP0D3=98I%vZuV*!s54&&ZE|jkTy?0Ts
zh=GBRmw^Fx4`Wyn0|Rs@7nD30R-y5C7BMiSF*7m<=gbF>FoQxtkCB1Ffgh}#!Md1%
z0n}B2@%@S!7+N4wU=pMrbY~IlejyMa)bv8;gUo}?cO^j0gYkQCn709X5fIEgWIo6|
z*nHW3sCh6xNIl3s++f#1?z05B2gbi%%)p?A)E;{Y;v=Om_7VmLH6FzMmAW84Qh0*Q
zPXQ57^U=dMv;;N0Q%WG=D~cAr3efq!XC;vEh4Hyd$qQfDe4%V9#5@>33C+ARH2zC8
zeoPsv{pZp6Ag_bM7dpJf0OEtf7nu)=AJ}}QeL2KEFg{2<NI!D=0O?2OgY?7ZH>X4O
z!}u`$2GH;a@j?2L`5^tU`OsTX{V={v1*-ohpz&EMQRPd}_%G1-f6(|K_kjGjfEnUI
z5Fg||WIo7!u=&~iDoFUi_&cjm?LUIXx2i^!_d(->>;u_<0eZ12h!3(KnGdobHXqzn
z4Y41_-;2XM=x`D|zLEJL^I-GI7oq0C_#pd1?tyM!g6l`-gY?7ZpJi$w?t$@%)eoDm
zPKWA;@nQO*(;r~-K;ePR2iXss-`)Y$597o1BfA%*ADIu*51S9?tcAEA#)s*LPEWw?
zN9Kd{!{*Opp!#8aV)eu3+t)+&!}u`$uzTu2e31K*`5^mY^Yh=K`eA&KdQki!tH-7v
zHlJ@_2k}3Qzqk%Hy~50E05KR4<ufuL<Q~|10LglYc`!am8^}G#`5ojQWIjkgZ2dqz
zR6mRl(;ooIp78iU=7aRZ)*GCM>WA^=8?gHaIvoo)519`#54K)mN(00{Fh0yZ(CbOz
z`jPn{{jl{7kD>Zue3wR4_v}RD!_0?vr{Lxz^Fijr)=TI$LEH!9FKR+HpIGx?>n%i^
zA?CsO*3H<>Lk@pXd?NEf?t`t@*a$Tb#)r8FIsQTVk@+C~u=O2hp!#8axPG+q+5m|U
z(hpk?BHjY=4~!4f54$fF#0S}r%m?X*tsjYp>WA?`>WN9uu=OVMq55Hbn11B?6J$R!
zA7npleacg)ei+}Z6?=SK02z#wKau$$^I+>+99kja0po+Tf&7D9-+<hM%m?X*t%oUs
z>WA?cpt(o54b^=U(fBa?7|`lFWIotF=z1IGc8Ggn>u;>uQO);2;~z%j!|YQ)N*^Hq
zA@f1@!PfIwcR=ie@t1U<+6Obw0K`BFZ)85mJlJ|5kxqztFg{2dD7=yLGsr#2e2{+F
z`k@4<ei$F7pHTS@TW|CWsvpLO=|6y0o+9(X_FF&{n07(j4_mJU(~q3KK<-E8gY?7J
zGo?cH!}u`$$mJzSKQbSrUjVwkX-YT5ei$F7AGtjM(vQps>2H9Jmz(xL^uzeX>W7W5
z7en>K_%Qv1!XGvszY(e*#)s)gb}u&f!^ZDFL-oV>F#V;F=z-S{FurCVq`w3kueOHr
zVdK@lP(EzDIv&c0jaL^z`LOZoRwy4fUOfZKhmBXShVo(K)%&1)*m(6tC?7Un{RGN~
zjaPq%@?qoET>Vh@L&uwCp?uhQwE>h58?ScmXJCMpouDA;MdM#V<I7DzRiA;zKZnLQ
zo`|Y`CK~@S8eeM?s`|o7kp3xbykiBL{B@{2Z2W_JGDJOW{3C8Ms(GOJ$Jd|jn2g$=
zojw`TpA|>z&rX2mFUcv8`~l<JO~GE?!NM41KPbH-^FjFyw9W+-{sB<)V0@5zP<clv
zzrfZrPKD}+@nQOr?Zak2Z2jX~sD2n9rXRU|0n(4m2e}`%UeaJHBs^ezn11B?0;C_A
z57G}?Us(jz594p1iak7`(~I!@i_8a^2V0+c2x=aT4|5N4eF<_8G9RoTx}K9~8pJ;f
zpnRBq<n}H|KQbSrAGZF}1F9d!Cssdfy=V(mKa5YTe%Sib15o`iK1@Gy{DIt$%m=w2
zwjT8-R6mRl(~s<5kbY!7NIz`-s_Aq{_`&!v{mAx#^ds{@`eEx`XG8VF_%Qv*?PZXD
zWIjkgY<=uOsD2n9rXSfpkbY!7NIz^n?H#Co7$2@5#K6;jfvvx_odNMbj1STV%FoE{
z9kBf%38;A>`(f*K=R@_w_#kB<{Q+q0KV&{gKWu&Pcc^|CAEqCiaiI2r>__H<^+VSK
zTh4^Ie*=^c(~sPL1?flTgY?7J4_89<!}u`$2hiM)%m?X*tv9{_)eqyt^dskYkp0Mf
zkbc<uWW`w!_rv%w{mAx#^ds{@`eEytTcG-3e3<?OH1{L(LHc3qpKn3+!}u`$3()i<
z^FjJy>!mGcL);JJ!}KG^H^}|Se2{+F`sz7Q{V+aEKXQD7^ds}Z`l0KwU&8c5`7r&+
z{Tq;eWIjkgZ2h+V9Ekg2e3*Vh>E8ibpe%suhw+Kk4_hD3G#6q&j8Ck7*n0A0sD2oq
zSpBf|=SQLXVSJc=<o+@!{E+#e@Pn;a=a>g^Ka5YTe%ShUKd62fAEqC9JPu?(G9P3=
zY(4x$sD2n9rXRXI1e|O@`jPn{{jl}(-=O+od}8&(*4xL-hxi}Hhw0w{YAhj@U&ws0
z{pk<|JE8hv>-CA%4_nW#xBy~5j8Ck7*!ur^sD2oqSpBg50xzKYVSJc=<nYE8{tnRn
z1-=U*?uYSV`k~AH;PHpd2ZbMOKf-3Hei$F7A33~1_9OE_`eFMQ#1}#Ahw+Kk58Llh
z57iIj!}KGMM}zD~=7a2q?T>f@)eqyt^dq|$q#v0N(hu8D0a`~7nh%8WVfsHnk{3Mu
zkoh3}u>BWH7eo9H;}feNwqJvL2}D1P57SR5{$cw&3ZVL7e3*Xd@^HBOk@+C^!`8c9
zgX)LzVfqR7zoF~nM3zF_4_hw>(@&^B16xlQ1l14Y!}JrfAGZE(3RFLg57Q6p?}PZD
z_=oXNErax*Ve7Z<K>4usTkoKJ*!nHT<q-9-^;;rPK5YG#I+PDvzhwpG!`5&4Liw=u
zTM1A;Z2eX-ln-0K)duCm)^E*(@?q<@)<F5N^;`R)eAxP}OHe*+{nk?`AGUt$2b2$6
zzs0=*;y>8>EjcJ3wtmYH%7?Ara)I(;>$k$8eAxP}ED)cEfdRH2Vc`nY{`m!{JZyae
z%SuRp9=1Lqd?l)SQ2gQR&o{0_?a%kDg!JcS(E9TX(EU$bt03tW#@AYfz5hw5Kdk^Y
z&jM;5j1O`zsQr$d-$3aPnGec;AE5eUp!#9_8EEcVfyRfKkKCUDnUBl|nGf6FwFhcG
zjL)<hyMK|#H$mnh^Fijp_J{GWhJ+uCuZL!yGa4V}-VJE&VPrnYeAs@o?NIY!e7JtJ
z@)*55hwV?h2-Oec!}KGU-yr`X^Fj8*_Or>Xf%p%`hv`R-KahT8K1e@o|63MRKa3C4
zk8B@EKQbSrAGTj^D^x#>57Up_{{iVo=7aRZ_SZ43g}T265@0a>$nFK{N9Kd{!}j9^
zLG{D<F#X8x1?flTgY<uZ?%$ga)eqyt^dt8lLHd#TApNlYelMZ=VSL?nsOi-njSn-Q
zP<;v8Kj^;>5<W0KOh2Lg4%=_o3Dpnd!}KGEC&+)ue31WO`x6gA^~3lu{m|{3@cJ5=
z57H0Y&-e$bAI7&_j~YJiXneT&XzPy(koX|;Vf!P)*F(Yw#)s)gE?+?YL*|3@!}e2F
zLG{D<F#QLR>;vgX=7aRZ_FwLS>WA@R`U%A^Y`^9|sD2n9rXSh8Ap4Q|Ap2qaJ5@J8
z{14;9^fPcE@)JluG9RoTx*s$hrXR|O>E8fiAopjG_#pkT{iBnh`eA&KGEjYj9A6;&
zk@+C~u>Gc&q55Hbn0^7Y`8{MlNIz_UD)UB&|6zQXenRaz*nU<usD2n9rXRWg3vxd)
zA7npl|En)lKa3C4uYgv6AoD@`Vf$tCp!#8anEnMI22%JV^FjJy`)g-F^~3leZJ_W6
zXG|ph$b7JV=zd)OO_1;d^(#Or2Bsf5y@ULZ%m?X*?cZ&K>WA@R`jN*!LHd#TApNlY
zzH6ZRVSJc=<n#{GkIV<@hwTqO4%H9i!}X)(UjrmQNIz^pvEgQj|6zQXenRO7w*NQ+
zsvpLO=|@g4AonBlLH5J;D>p;+!}u`$$nFK{N9Kd{!}d3SgX)LzVfrVa<sW1|NIz^p
zbmbO^|6zQ%eh>pG{Y*gOgY?7pPk)5!hw(wmK<N*;JqB_=G9RQLw%?k6E5v>nAEuw+
zc-#(9YGq)^g6fCu*M{jwwhx>Au>IWqQ2j7IOh2LchwcB~1l14Y6RRJ#U;HXmKa3C4
zPpE!}?JxfW)eqyt^e<qAgb#=h@;@>k6#fp-{pU8@AmI<=gVclK4>`Yq^ds{@`eFOm
zW1;$Ce3*Vh;RoCAUI*0=<HPhL=NFLu$b69fu>J9iq55HbnEne$`5mMmnGezr+fV-h
zsvpLO>4y&Yf{RU%eq=sKKWzU!>vl-^!T2!!$nFK{N9Kd{!}jZIK=s4;F#X8uM?m_K
z`5^tU{rzj9`eA&Se&qHTNIx<kq#t%3z$K`D7$2q|*}WkB$b67~*!cm!q55Hbn0`X(
zmjQa-fZPs9_`&!v{mAxVvmbUofh$x$j1SYFfENGAe31KL=NX)W>WA@R`jP8vkp0Mf
zkbc<t2QE7y?uYSV`jP8vkbY!7NI&engoRN3Fg{E_a(f)4ADIu*4?ADs0aQPX57UpF
zUP1bi`5^tU^B9<SLEI1H!}KGMzk~E6^FjJy=QqrU>WA@R`jOKcNIx<kq#t(P!y%}C
z7$2seQ2fKrhj<Cq597o1KY*U>2I7P4N9Kd<Pk^2$p}ZUFe<&ZM9#sBaK&wBI`5^tU
z^C$eE`eA&S{sOf2A2J`LA9h|vAyhw%|9CfQ{|IIt@_GP}e~|fL^Pn35KEcd`@?rW5
zI8o~ZWIjkg?0k&yJ&^E#@nQOr%YTsl$b67~*m)Xzq55Hbn11B`H%LD+AEY04{)W_E
zi2X1=Oh58^caVN$K1ja<^t_JqQ2j7IOh2Le2zI`Q{yvEPFg{E_a{nJ>KQbR=KWsns
zWT<`^AEuv>{jmMl*P;4hd}8&(_G>Hbhq@oSUz=F{u>IZhp!#8aV)euJgFl7phw+Kk
z58FSkc>v;m7@t`E4WLK?otFjG597o1!^YD=e2`KY|Mfx0crtAN?q4V$wttuZ5JVof
ze^&|0hwa}rh4NwhcRiqd*#6yUC?B?eHxJ5(?cZ&L@?raTr$G6z{kzMdeAxcoT~I!3
z|L$2RAGUw@0hE6R+F<w$<-_*xvK@x_2eyA#63U0|-_?QgVf%OOp?ui>-5@9*wtqJj
z%7^XWt$^}j`**vceAxcoc~CxV|L#U8AGUw@2$T=ozk40Zhwa~e1?9u`@BW4IVf%Ll
zj-Zam8=>(d(D+qo{8?!HeQ5j#XndxlsP?I#@x9RaIcWSzX#6c`{A+0ZZ)kk6W2p99
zq48tT_zh_MMQHpZX#8i#AoCrt{g;x*QSEa+4v~lL$1FyZpMl2TkH&w6#^*nQYQ6;;
zKOT(_%Fp=bTiQ;b&bLfH0hw=+MW1hZ2dN*0PeRHc1}NY3B=-84Q2Q2kK9noeJQyG3
zUQqjjQ2PdUo>T)=Ka3C4Pso1Q`BQtL`eA%x^~27q`UBMu;}feNcD|MMDM)z2_{8dm
zorhHe)eqyt^uIxCU%~iCPC?QmY(MbNQ>f`l^)y5twm&!w%7^U_?l_IA9%LWB^tJvp
zYWmu78j`*g(9+j}kC60bat0C}Fn-V(^3oUVe6TpEc`!c6y`c0(sJ#I@Piz`gKa3C4
zPbhuC&L4XS)eqyt^dq+qLG>*%AJkrgomVD)77`vXK1}}vwD?8lgY?7BH}i(-hw)+h
zk>{I0_9OE_`eEmxwL$g6_%Qv*`xij^k@+C~u=CSSLiNM=F#X8=Es%a>K1e_8ytRK&
z{V+bU`eEm@8J~lOAM`vnV)et$bIXA0hw)+h38f#{`ERqJ`eA&Seq{e*^FQpoxbsl`
zFg{E_a(V&jN9KdV4|cws>3N9%VSJc=<nd;Zeq=sKKkPiZXsCV|AEqDKK9GK7K1e_8
z{JJuzei$FFAH+Z!KSdtT1nGyJcXt7*AI1l11La?2^<eu!5>WF%`eEneX<vZ&AI1kM
z1L;Rze+kl$%m?X*ou^j=)eqyt^dqmo0_jKQgY?7B-`ft=597o1BijekkIV<@hn?3a
zd=cV)7$2q|xqk=JkIV<@hn?>i3Dpnd!}KGs{{-nr=7aRZ&I4Ql)eqyt^dqmo1?flT
zgY?7B4`jInaX*X?(~s<4kbY!7NI&enK`*F&7$2q|IlMsnk@+C~u=5FLLiNM=F#X8!
z1=5eq2kD2MXLt#!AI692CzSqR=O5}^hWH=GCsse~yu@~>ei)xv{jl>DA42uR_{8dm
zoyTZ%1>$}fAEuvB`wVt|V<}WWj8Ck7*m;i|q55Hbn0`X_FYJ8C=TQAHK3qSD0WB}E
z#XsykN!P0o|HJqoZJ_c4IlW-h4?BNy2~<Cf57Upl-UXx|nGZ^Tu=6T^LG{D<#OjBg
zZ|Qdp;(iz(rXM-Kf$T@-gY1W$hdBkRAI692M|LkrKQbSrA9jA`Yp8x0pIH5{^EMr>
zL);JJ!}Jphf7tn)^Pu`+d}8&(&hz{W)eqyt^dqld1^FMD5Ar|k{LjQ25ck9QF#X8k
z1=5eq2kE~c4N}R#uobEw#s`Ul(jT&VZ2Dp6i%Q*u*bn0qs~>hAX$@3Aj1SXKDEwjP
zm)?Tvhw+Kk4?FMF@)pGXFg{E_a(ffxe`G!={9xy!PKWA;@nQOr=L<mkk@+C~u=5zd
zLG{D<F#QDAv-*QnGBB9jhPWSgo+C&U6n});kFfI|i=g^pe3*Vh_QTGH+yd1P<HPjB
z#`i#ckpE%)S9c)eU$FDt{y_P#^WFIGLgZoRyD35Wu=Cwap?ui+ZXQrR?0mOqC?9sd
zTOO1TJKwDl%7>lrHU-Luo$t0B%7>lrwhPLKo$q!Q%7>lr_5jL<o$vM;%7>lr#&!?l
zAB}WSNH8!+Liw=s-E^RQ*!gbuP(JK@w;(7VcD`FGh|j~o06RZx+C9|%@;<0M>^!YE
zP(JKDEsOi8>Oro;*T0UwkJ`V^xDV-HE2H(VHz<NkVqkc2ACewne69!N^{-*)4NE<M
zm<Qv7YyqWjLg^EBK5-OOKa3C4PpE!}ooBojsvpLO=|}F5fYLWIAC$jg=O4d@>WA@(
z)ek!_S>qwp|4Ja8kn>_d{zvXlgX~A<gY1W$ubd9m595PO0qaNW-#Z}jLHc3mF|UW}
zhw)+h3H2{w=QqEE>WA@()ek%GS@{vf|1dtW`eElo$3peP_%Qv1%179F(o>-NVSJc=
zLjH%HKYb3WAI692C)B=&omZ{%7~+2zpIH3`(DSUjq55Hbn11B`I4J!f^Fi?sI}dv!
zR6mRl(~q3qLHd#TApNlOv(G^F!}u`$$o)Bxeq=sKKkU41#U~K|!}u`$$o(;peq=sK
zKkR(&5~zL{AEuvB`vP{J_eQ9G7$2q|**<Lc!_NO^c?xkqj1SXK$bQ&);f_%KFg{E_
zq4WbgUwl4PKa5YTe%N{B|DgI|d}8&(&M%L72Jt_PPpp2}dFQ*K`eA&ye#mexe0>ml
z`h%U1{sXEX#)s)Av_1`Xp1R|6i2GrDn0`X_2kiXyMyP%mAEuvB`2#zz{VY^Jj1SXK
z$bQ)Q?%XdR?uYT=`aujl{ZrU^@VQX^Fg{2dsQw_7eqrawUx4a|@nQN2^)F!O&0D;L
zxF5zRRzK`~`bkjzFg~&RVdvTZhU$m$iPaA~|32&$#QiWnOh2Lg3p+1=2UI_d57SR5
z`~{%r>r20e*bn2w^b-ny*m?XlQ2j7IvHD@>_g{nRhw+Kk4?FMQ{td+aFg~&RVfO>f
zg6fCyVfvBVOQ7-_nGdQzVD}09f$E3xiPaCge<1!X#QiWnOh0sa0KEJ{=7a3t0KKo^
zAXGn$57Up_p9HxdnGezrJFj2m9mIYZAEuvB`x$n=e-czbj1SXKsQiN62e1IDAI692
zCuBeD{(whN{V+aEKde6q;)DDT<4b&i^tWK=Uu!}6u=B5Np?ui+*8xyI?ELFwC?9tI
zbs3ZoJO8>9%7>kQJqOB%oqxR^%7>kQeF(~joqv55%7>kQ{Q}B|oqzos%7>kQ&G!-F
zAK3ZVicmi6{A&{^A9nt=JBZK2zyLchx#lBke{Tg;9(I26Ehry$ezNu_RP~_nMeZMh
zx(T4Gc3|fri!v}U@bfV+BtXw!wg>S+{Zr`q(|I61BLf5MeBa3+J|6=^22}lC5Fg~;
zZjen344`FXAoF48CkZk#Fo5{5{gR+-Q$Y50K=ns~<oOsF7C`xpP<{oJzXHmi0p*{E
z@+Uy~KcM^uC|{Wg;-3vrz8i?o!@w{Bx&S{9#Ajq+fXyHGfcSh23<Xg28$o=~dCJi9
zvTuX<Ag@99Z~g}H`4|{>K-DWTGcfS;Ffcew0x4o(@B{G~85m&w=K>I)kAYzYRQ+@i
zA7p+XMEzk9AH;;Nmv{=|b2l*DXafl{FtD;PFz|CXFc?736IX}wC7|-2Q2q=kKO4q}
zs_%vJC7}F`P`&|F{dFk+Kr6&PIaY{$8c==)h|ku*5M2S0?||~BLiwwpd<H2043uvN
z<%8}N=VxnR*aYPZurV-z_+?PO36w7j<wrsJspSy+KzAyG)GI*wui3zT=zq}WBg0=b
zz7#vEygC}+6pinR#*akfXQA<{(fB=R{CQ~n?P&bdX#AUK{8woFKWKab4pjfiqwx*U
z_^xRDP&9r98ovsS-;TzgipF1t#@~&`KZnMDfX4rX#^>Nf^`9gfUlWb*jK&X!@Il2K
zIEf@f_>jcJz`(EtjsFFWZ^{L>AFe(IjbDYvpMu6;i^l(n#&_XHHGd)+{}viwnFm#U
z1RB2=jei7<{}+vK!;5NO1sZ=X8vhX*Uz`tBzYiL}35~x4jsFgfufdOMUOpOs6&n8q
z8edHSRevfPe+e4@HX2_@5LLex8s7npUx~(_ipJlF#(#*$|AxkA7eck)8jYWi#$STQ
zzXj&QR#P)Dd;s&o%~EjsXA(v=Uk8ovgvPH!<L^S_e@Ek+h@hHZh{j)s#(#sx*AfNk
zN4Vbzjc*C&!&d7vFgQW@i2RAh&qw1opz)`n@fV}<cS88^^5Zlb{~j9uB^v)D8viF6
zpHU3tUxa@$*cliy*%=r>^-MN9149lw14Ax514AA=14BML14A)814Ai0149`*14B7G
z149)%0|TfIs$pkfsAXqhsAFefsAp$jXkcexXkuqzXl7?%XklkyXk}+$=wfGJ=w@eN
z0M${w><kQj><kS3><kPO*climu`@7CW@liS!p^`jm7RfM8ao5SOm+r_S?ml9v)LIK
z=CCs`%x7m{SisJ}u$Y~JVKqAg!y0x5hIQ-=4C~n$7&fpoFl=OJVA#aYz_6K}fnf_f
z1H(~v28Lto3=GHF85mBmGccTFXJ9zR&cFbw^Ukm{Fq~s&V7SQ6z;KD3f#EVcIC(l0
zurn|evNJFgu`@7~urn~Uu`@8VvokPsurn}pLezk|8d&i}b_RyEP&Hxf3=HAy3=9$M
z3=E(;Jc^xxA)1|mA%>lSVJB1#0|NtAbCAXNurn~6WoKYG&(6Sbft`Wj3OfVCNRA@~
zhWLO=&%Df%`0UK`%;Nl<veY7m`1mM4Lj#0>p@DOLUU5lLVrE{6p+QJxL8^aJR%&vI
zD}I%x2$k`{1*ysSm74~oCZ<%nmSHi|wW6RXwYWGlKaY^@NGgf39ibez?WUQa&`GUG
zPAw?O%+D*v=0fw(yzIRE@;pQ^VezY_Q({U`YGG++QEEzXNl|KEdPxR0WiF)!Iho0c
zC8_vLgIejDl35a#Sd^KVl#_~Snvo&gGy)2ZL-UFgOEQboz$W3d+9V{kC>MW3K$H-U
z0kArpL2niePLBAkf+-?w8B`@s>nvcwh~GklGQw8E)#A3a7@86YSPEB0grzXG$d-n9
z`k8<-v>}e93YW*3N)amHNeV5MAXIqfm82HsCFVdP(6y*2zX+T*5h`${UYH7e2^X#g
zZ<-D9^b0OAj*mAqG)+n@&P<LkE-A{)OHV3I^Gx;&jyE(+&PXhZ2Nh~1#h%Hou71Js
zmWeqz`N@eT`9&ZBSD2Hb##kU50}A8BT+&PmCJ+TMdE8L|SAjDM;3{xN0bB*nD1fTK
z8wD^m_@f}0I#B>OhO#IKsB|n!j}J&JDozb4Eyzg)Reh);j(I8Gsg>pVMJdG~Ntevz
zlK9}vDzHk|qM~@W{G!~%5|9*#=a*XU3ab`iV*bg=rA0-lDKLI;Y6+ySL=p%FCq%G7
zMRGi-k_6`vm_S%!PH8HNNK|T3zDs6VCa4C4OSt9aCziy!73Jr;<d-Hvd;<{!nFun>
zCqFMe-mw@V?VF!cnv?31nidbL@j*%;BJqwXDF|0WSn+N}sbGIYx$e&KE~z;nJqUr&
zypW>A<ZL9tfTGMiaQFnL7M7;wC8x$a=jSCuf($MM4}35q-Y>NrWCDZ}05uQf2e9Wr
z4F|AvWkG7Zdum>4QD(9uD4U`S!i)i_^DGW7O)9A@0DB-bFEcqmB{kl$I5fn~0;CNl
z0t!4>WW)zl#=9lv6sHE46qP2IFvNod{POeA_$8jX1v#m?sd**fN*t~-q^K03GBmF|
zGcP4RD7CmWHx=BT@rITmM#yCdv89K3a(-?>Vo|DVZf<~gaxSugMh5ZmWvR&}`9<+b
ziN&e$$@#gt`FWl`NrtYjAi0u^qWtps9B^ABJ{4RJLmF662N@a0gEYcRN0JRN0~MG#
zsg|Y1sqtxv$*Co-L4Lso@kW^`;L_6%YL7)`elf_RRI~KdlAQcxXd^1P#LzG&KRq)U
z;x=a^*8uNiu>XQf42_VT?P3Vm0xr|Z3P`iW;^NHoJS0mfFsmrFII}9%H5cYy@{O`A
zC@s#2PfASAcBR}fXG7N@?_kq-Xeg5Js>EPSgNz}`Hxpm&=jvjB8dbz;LW^IhrhL3M
z85fk6pavOH+Dy}ON{chFIugHqX2q#_B}J8>APzQ031On_g=$An%0%hK5vcgxOO{PJ
z#jYvHiH9iX<rKSCB5N{%CKLkhLrW<{>OxB@MCvj^jc`PxFf}n3XQR;7q_{E<QCfj>
z6OlSiGjo$lQ(c3+{et76r6i<CgjNN_8dI8<lV47j+s%qoOOi^{TmzDGkmH6(i_I#E
z(^E_G$k$y^3exROmgSbk8TsY8iFuXeg}pO*k?n%2%`_R*gz!vuh1X_~sDs8PTJC~Z
zamL2+@x`en@kxm(Ntq?_iFqmU$@zK3nJKA7@kOcN8W|-z;TlaqBMq(rkO~@N6<p3V
zIVUx-$TbToA}!&040}Vu70El6@bL?nU~q|LNorAUW?o`RD%eIue+io7@t5-8w2Yw#
ze||^PLtMs1(?n$Ih3Z0YMu37Z#51@A5guTXFh4_s`1s_Cio~SMGDA?qsW>w}IVZ8W
zI3AP);xqHo@*yUMVKp%$&(JI$*4H7<Td)E>gdDxl5-_C9AU@B?1a31?5eK)JEY0|B
z#vlCzY$l)?hs`90Hkzl2)D5*eB?po@^U6Vu@c5*v)S`Tbcu>MgjE4>#q{hQ@8cZfB
zH9a#gJ~^=@IRhq?3Mvzj1=5OAvFHSs6(Bw76&3LXsYS*4d5JlhC6)1I1|ZSmlEmcf
z_~eZ2__V}KP!S)G5J@jhECN;I@x>)6`K2WV4Dq?Cxyc2UV7fT9gaI@hl9QUpkW!hG
z5?@?WniQXunU?|^{56Vq_wjdf^ofu6i#Ieh291g2#^>gzq*kORy9SuY=Yz+8L5@hy
zNz6-+PfpCqiHDabpge^-fQ)LM1!CYRJ|z_#a;^cOF&U(BW^i`HXNV<|AtgnLWvNBQ
zshFk&xMjkf8&Kv7YZ#-9ZlgNR49Q$bp-ZyUOp%NzN-ZeDbQYlyGC?vSFSXn?z|as}
z5<$Wp?xSE6NGlYNcZ`ut$}d91U9b^EE2N$=q+nzPtsbPvC}@KawNRvj+o)$C%!OzL
z2{qhEMI)iEgjGq1N|Uq^Q1qxJ+Z-yn$GkW-H9J2o&9%ZWI3C_M0*j_(qPC969`yyA
zR*+u|8W1)~E66YQG&GEl&rK>WNi0c?FL6a_gOP2EWhtnz#n-eaEv76BK;s9XdcZXk
zT)05<HMucnkzbZt1a=;>aYiQb@dc$N@t~pe#N?9HqWEIyI3JECC}=b&-VoX@fmQG!
z@gWBBuCBr6@d4h!Ch?%UGzykd&En$=5>vp9W>B?NT7t`9P<3GmH8H6)Ek3g(wFo?N
z4>R2iVtTMee6U4)5OR8Pg%z8SF!BMl_rO&x>OdK;p(tlV_y7`gbc^g%U|d{M1nmpp
zF2aork&5tC_*5wAl?qB#P(ZqV<Qf+=E(4yxBwRTfnIz}um8BMy#Fym97p3MTf~HXu
zbK>(05VcVdbZW#R2rlnm0Etj64FO9e0}&2^wMv3O?H;rVZ1bYjl+t8S6ALzzjIbM=
zU66y{$PCGR(4<*Ba;U?s!O&)CkeisD3T_1zgS-9U*(c-N!~#!vg`EN#95u;FEiU#9
zHjW2PIvF6(I)P;i5;KcDy@QS8Q(VCSY!M`>8kxt(mw=jzNr^d$pwacjG*BkY%qvbU
zD)9tIg&9;u9%#rrDX}=!GoTD)Oj$@gtnmXe3#!l)l#z_$<I6y^4&XKhl50T@j5oB5
zkIzjkfU1CwMT49l4C;BrBl!kA><lV5ktWiDP2yc$UCmQci%W{~D<O%_5Xp_6!6xw_
z7b5x63^Jk(ov{SZ5u+Pw1Txgw2x){8GP{Y?fHZ?dIXEWb4Gj=e{g4C&4FsaC0*8|c
zBqZQv4JhnPp`JkzL=8~CV8eL$RIqD`E4EN@0*68lD5_jtL*hZRk*Rs0N&WcDG|z~{
z#Nr}Q<YyMgCl?eLn#ZT*7nLU#r67vEV9$70S5t5-78e<sffRz*Y?LOKWG3aLdIlST
z%rrEGITJSX3DRd-l$w@Wl$w{E>IyT%6(nu~>inVv7sRjNut4)7IQ&sO2?`wgB^~4p
zY>3*Tw?M=_T6Y5>0r4dsLo5+$K|zPy-9X5=lF{8j=#EG3ZXkpSI?WWJHXf<Ffe<BV
z0&)lyWhQ4N=OUHO!A9|rHUOT`F+o@sPg#Ej*`&mj6htu%>#z{%isI5Fvi0Vc=8&y7
zFEt&Z7hI?j>EM*iGK5C#o^{I1ODwAN%qz>!P6g#bQ|H_Q*FaF&h*G|}nq|V27{{CW
zf#$41<qEtK3J5le4>pH%|4<!Ajs@mHsgTUzif}$I8_Ym8WO5#)ra~@~gFyv4+=1XG
z94RdVONhgROOujwuow=iX;BTwh#05|zNI<XOaK+xuzDZtI1K$beFmzOAypVs_<+qI
z!-I|~DcGEc+YZv)i737)aVyTCzz9WBLIBxRNO*#LjhgNe36SIfMm8Q13{<rOn#2%^
zlmvTl#$yUqooHwT$w;V0j1Qz4h@4-kTf`8ufRrKzyBFMP?FB*>kmd!H(N{C1x)n6U
zS`2AEB1Z>C@<3|=Avaz?b5SLUc_pNqg6t4*Lz5^Y&{GsN8#tz<(6Hhn))La9g(R;)
z>RnKAjas<{o5hC^Q}^Qa0>W_6zy@sLsw+~l4XT1Ewg(nGwDAS9>D2N@aA^|llNCWr
zU_L{S5aL>0(3r<=3rfsGDnbfkod`P+wG_4OfrSz%b@>Iy)5bGMmLRu3NX;I;r8%@u
zR|GAAd4{5N<&s%Od#@0*gfy?9REQ>!W#uS+dFbMPf<0XWs9qvEso)U}lg!+L97Cus
z(ArPf`Uz;;6=}c;K3Rg=r$TnR5!9gg#FP}opf0!?g?RwpPa#mJ8yQ1=Qk0sTUxvI_
z1k@rV*)*8LG7^g^b(j&<VW8eS^5zt>++_+i4B|_aJuGCI2XkCTVll;r8CpUeS6rH&
zo?2W2S-4G+vq}q65=&BH^*N-a2a8Jr3BnNOwW8GGR7mTNSe=GYf25Rx#(hBR)gS@{
za}&%;kVY(y#h!x<jZsrSc!eDD7Bh&oV9kb>Sav$VGa6V4Mh>-1E=o--Ne#*`hmHLr
zrRX5gSgdhKVmibUZ|p`G8YgGu=NG32mx9+hfXy+8H-;H$7>_UkY=9B2Z4d;UYGj(2
zk`kU-lHpiXl9`s7oSB#di+f}{!7&fE2<%yqJ1oFk_1yA{pz6$mD|3_bb0EG#C`B&0
z3=N7)lZs0+OG-;1%MuOZVY)z30QDc5UC0Jl<QJ4==7JVIV^xnmu3$62kdVfz#-uzW
zu>?8|hHyH_mu8Te4bU*ZVLW772bTiS)CtTxP?bLU*_nohj>X0K$zk9%^3WLtM2iNL
z7{Eaa(+Qo1gwAPz2BrLhO;KlPOyf<!Gc=$<H1OPpt7~Qj$b~2iD&TV%!BFLp`U2T%
zqj(gn!97&ui4!x>90x1}K!fRq@$uzFi3Q*>f6oB#V32~KU?)gt0&PA6)mjvbQZQ^X
zC1evwK~S(G#3pbI;7;+V5sp(Ga-<XQ_5i<NV@N*4@Ps)bPk<Bz1v^7Lf!9iu!pJWe
z8d1nu0i6E_trHB5;^RS$g7~7uy!2GpfME0ZU?&hlWNu_HLRR`4AP0<(2}nP9C>4_0
zA;}k6mn*)23ATuLbqz6y4~Y+Uj1RVi7!6K1hL%`%rXfwK<5q-PmSQyrw!+X4R@z{(
z3pwF|Edyr`<j6HNgS0k~s}0a9ZWP}m1tsQ|N;IcpQGl9Du<Xht!kM7T1EmoIDwdHa
z8WC>9v6m0co!B-WB9xF&oFa#&1;~k>parzp3Rg4K!qp5~T7n8!P@&_BR=9#A43}q6
z!xV=qjBqu?>T6W9u&O~e32$+NrO?M7K(s7SD0Bq=A_d}o9EAo6HsdZfFf*wIB$I+t
z7fPE6DFYzVzo8|T-L3<kv`tfs5{pw^{esQnL2JyAD)(Tgc*yo+Xt|5k`RHjBix-j8
zC^+w6hNBTm?*(@ef<<RaX=08)tW5~sL;!IvhEh{Vp9D#XX-aBRX}T%6;{ck<#L(gj
z$_(HYG=>;^{Y<d!-ZnulNs>Y9D1yA91E5GgH*^8n>yesMkXmE{vK`ufM$V9+MQae7
z(H5>HgBpVv8^@6&3gQi1mVlbHv@zTSOW_=hloOC!$Ed|Hs0j;7fJiM*5EoMG2b+Kv
z4S~Ya736L+jHP_ZAhi^P2&~;51a%bpQbEc>JRYUSCd&@oV>yOqu%%~+fhgD<F}UEt
zqZPF753(n>JT);JJpN+=GY!&(!=u?0rZPS^H6^n&mu&5z6<K-dBx*N@+g=1c%Y;M&
z5YC76)6ujOJN{*81UiNU5%ll@WP)K2)`zlV05bAS`q~`>uu+IrI}mdSIKUFD3BFE;
zC_PZmlCw0&0&E=-!=1QO9K;CFa2`Bzu&+-+9_oZ>B5@TCL=%b2Zy=gTS$6}`LD7O6
zNVHLR8XU5e2aCz1jFlrVWPlijI{irU$_T`46v{jec=;+kJ%CNcNcfm;0S%5K+Ji_F
zN?-#Ztp`J*76IdM7Jdtea3S&}5y_51_AtVmAU8pxf(jl+ECC|M0-PQOuN$G7qabTq
z@R*6=4(!V`APYgD&Z6onHk>AtlDDus8nX0*7LLYgGHH%RE|v|A!Sz8I?8Goc+Qbq?
zC>qVc8k6%=(?A2&sn7)^5N{LG3#lj(YesPD!oR`-qK$fmF0>4RE=9p&I%(ldoZ}G_
z_^1UldFw)8j)t3xTz=u0<VJWKr9=lM1w@&GUQ}U;Vxs&D3qp`pRP->Cu|)V7Gvh+T
z0J{DG7K+qbH%riTnDZ%`$VYQLJPCjz64$~=97%%M0D{{=y+DTr1t@`lW;u}8wIL-!
z${dbn3D#szstwT98Z=5fxJ`%og`%_rU7tZC$Ky7gG{>V>K(JvxlujQaDzTJMDEiIe
z`cXQ62m?s(0Ghy!M05fX<`Hs`F<dviH%Po@GlXVxHf$KdwNunJG(?y~V*e0HCyAXz
zB%LJo6p?h2(p^N-Mp2&;5xLZB!XUPQ;4qz(42Qk@f?8`+sZWGle~@e>w0gzX6~ZtV
zBNM_r3GS^R1sVmL2cYgj>ILC87sEZ6RWB$`A^jC3V`$JD!eKfoDF(aaA^jE{##6i@
z39sp-IUc2cKorM$iMf!YL=axU5^X4&5yf$FYKb4Fe)vcU@dhR5=ar;ZKvr@Q?+{CP
z!CstN;*8Y@qTPb1-Vj|kye`4tBS2`SUik;jdeAN$4jV{I6vTT1-WsN6iyMnq5cZ&!
z#pHDvkpc-8FjQ`PV|oGEB8vO~?eie{focUKG%4Y?0p<~k8sAu~KtwToB7^eI4{|6W
zETD4u!$O40X$UinkS(If574d^a_CUG<B#74m`5l|L(nc34LyP122wmh+<_W;$@w`c
zdWIMkBq_R$z`7yJQQ%83A=Ce$;Y-lI2c(H^a4K?z9V3BkKdB~wmM|EO$kQQ^Lk$wp
z=sP9|ulFWw29XP;Q4GSeBm(R}M3$lQ@?R9&LFdLmR)avdR)98-5Vf+92xADY{6jH=
zgn&R^21BiddoZ`SJ3B)UM~p849c*IAfG%Q!A!3RlVum4NWPmAzX||COrYd7hAxjJ)
zV@%_XF^xCIG~O7~cw<c08=GU=Xn`SQgehc<8CoWo!DWJJqY0*sCYZ*XV1|S#W=NP~
zhJ-0*NSI*}!VDZUOb?l1ddLjZsb-i?HOF+SIi^!F0|L`<LrjwlF-<bW^o}8B$Qxo>
zXozW}5vE3C%y2Nq^t1`4BTO+hnqx+WIi^N)EE+N0Wr-O$mY9KKiD{N8rj4dpq94;n
zGt3|~$JA(w8LFn3;bn&DdNXkIBP0yA9F2lw9l`6kLPDr!k~1|-G5{X}g>mXFxFG~;
zI)zZt7U<$h_%YH{wgi0QGUkFrge~w$B=-zNNDPKSSEXRviH4#A=T;XKRao~epsK*!
zE`TDBV_`dr8mtS}(NthrE(})ThkAH~WiI&En3U8sY_f)CpoR{3Gg?u6aZY|oF;o@u
z^>HZ2aiJV%2)^cpC~bzwhX&`SmSp6oK&(PL=?}#&L*mcTbv4T_&&ezXUHpR2L?c6_
z(>O?RJ(4!!U5}&*|1=Get~WG?Z0rPIIz(J_8Nzm3BfA>!xy<kdq9g<lVo_*%Y6)oj
zDyT5RXA5HSEwS!01$6*P@i=6GDYEhS5(Z>VA$4L2F@{S*5F$pPiFGp4ycQ|$M$KUs
zNckUbv3@~OW?5oMs(x8wk$zf!PD*M~vA&UszD2f)MOKbkijj$Bd|rHET19zrj#))n
zu7QDpL3*Bkh<-t4fo@T1p>9%XW=@K(Wkr0Ffn`~NenF)^1HvA?<bnbQYFh!eQNJWL
zw?Hp9F*(1uLf6nh&r-L-!Ytm*MAul)9OMWjH{^pZk^)_$g?w>lL8U&D5|A^&S81Y#
z7|zqw^-|N*i}jNW3MzFC^$hh4bPFnt^~^z{U_t%NyyTqHlvD_(prl9-a-KZ41K81f
zR^Vn0bqaP=&tQ#y@DW4c+d>B}s*v0OI-?&g>mnW70KX-%pi&=66Fl;8R%9qO67FN{
z^}tC&zc?eYC^aP|u_RG1gF)Z10CLAC=t5Eb2=Mh}hDLgc1qJ%xDVw~KVttp?vecaX
zg481Yko^3d<c!43ykdw_m(;Yx(wq{#isTZQjDBfxk$zHUp1z(QBAIAw>z5hoLryrQ
ztChK_x%owvbhQ$4xg%Yz%mdw9hkjEoYVJTjF&Smi3ChM4($}5P=Z*q;rqW_it06I$
zt`P|Bnb6nH{9?M=2R$L30a^+n6>#(|g);Mt)5;;|GT`25LPB$$p`cP9bUJY{olDci
zob>#n%#w^;ScrnsE$AS0y$l9WO-*$D4Ze&K<a%%m0e2;Xa%el+{T<LqMVw9!ZV|xG
z(x+hX0=_jCG7&O*r$;&{o{7Kk7Z!f#H#AWaC9v(j&?q5yR0HNT*l-fr=1|E!;QKc4
z-RVKsYN(_LavdN8C|gn((qNwr?VCjC8TRp@E2%Q`(y0;jbiKZWo`D2AzMrm*i1>KO
zEx*VyL$`}s=oz{g-o$q|3_TrIT#^F0R}LBlsFx!n9iKCLBMo>zIZFQu9A?O^PKs}&
zDX7#hEy>J*Hn)(CHl*_1H1telkjp*jT(YE<<|RWf1OnYwLs#2NN(*uz_p>8MA#v-C
zVC{d<iQIJVgT%)d7vyA?#3z-erKJ|pH^eFnQbE_BWR?_T<SLroe?-qbkegTl_86q;
z2fA`+^uDDc(B(m{uJDC&-~a+Ir9v)dNAFu2vG*+zS7O4}fq{Bfq?fl4-J|O}>3#bb
zB-}9<^<Y_agRyE2bNSA2T&ou!9~BY|8%?2LfCs#A0$~!l<1!Fq;EP`<cF^bwCg?nY
zXEI`K5mlB+5nRFKNwri3PY#r&D!79vG6v7O4&oOFA<f_#nu3-|_=At?jt?$KO$KKH
zJhq_B+`<<v#K%XW%@U(d=22rU5wfcd5UzG*KpMjb%{&jt>INh?V4E^WEJi|}GDp$`
zayWYBj=Gl25c#4()G2d>=>%gB`8Ff8Syd!c$e2|{=pM{hF&0$n7lRLb&rBv_{s{R#
zMf|ILz|DH(h8bGiA)7?%W`AV8q;B^|)=TP!e`LKRZTUymh;`FHq=QKI&Cevw4d5Gv
z1ZBA35}f0KsPT!uwH36}9CGR$>Ul9#+fzx51*GgGgKgf&*l-K3n&BIp$vsg3vV$Mf
z@X-q-Gs`m}i+pj$04UTDTdR;ZuaX=YIIX}q+yS~Yo2UZ=kxhjhdjoPYYPzT33<zZ7
z5y3!JD-au$p?hsfum_`Xhq)Jg3<lLg0sDCnki#rcEum@=L&ySBiWux(fE+qOLoX1r
zfHW_lo=t(&t^o~KLvHFp@e4-sK&$(an_S>EM57l-f^J|ojwdZz(Cc4lj)tC>fD$9F
z&~s8iSsk@<Bk$A)oQ6~D=0&WYfCUe2e1U8_wY&j6cK|K8sC+&EK}%phqv(hL?6x2d
zf}r9_6KKJOVh5s@LTVI{de|OTzraEXl)7l<86-=P+aIKQ0D2?@T0l`bT@kbd<{668
z74%>T+IoecC8T&|bb<nWX;@-Oei8Vj3ZwXV@QF_Gpe3}PpkojanG}@BLFaH9LY4!>
z=jErQdImei2b8(G=0L832m$RPwM@+e?c$HmO!JILOe`(}&6Q>r$0rZo2?{LJ5t#dg
zvF+N&oa?}{Ykzd}eL)H2U<d4*??D3<sG||cja1_IRU(}aOUA%2Qd5}tfnOv|_=nJN
zZ^J}(HOjy*q*py~`=p@uV-EO2FAqf?>xHVtI?ijFnU`4tJ>LVgs|k9aBkmZ7Z!AR~
z2Zmbe3R_(RwJ`&g4?1B6+uAGC@p|aFEAjZF3}$jsemT@g@X7{^qgz~zT+3i;k*<`a
z^U@#Cs<X_JN^o}#>`vqr81R+dxC4Fk0%FJpXv9DdI4JQ1*yshs@#vWnc6k73%L{aK
zBX~XnntyNy=5V^A7`pcclpzt}fU>Cvl0UH>8v)O#U?mtu&FBTj*u!P?0^{L-fiVSB
zU62_$_^E%RHz1?kDd`<NdIK`@MZ`#JF-C7dF33SX`j4n{3c)QWa7m5j`eM{74x0kl
z_Ce@X&6tNC5pj1Z!E+E1Zp3kF5}G@)-UVz3?aN@hWE1Rv#NY{NC<UWFr`AD=ggt{A
zrZ~Ke5w2Jdi^8}{+bJ<62zujVa7hvP_DW=v@Rq*F0fgl?bwmJxE`3G5#}!;;fldoA
zPAw_{o!&}8nMR=_MsGkijHi0i9=!n>bO&)UXiyd-Y^ikvGPI`xzHk}ZMFW>w5DRhL
zRg7F6fbRY!{X%3&i9qWWQbc%V^x9?g+pY)wwaZAc3LVA+AD?WTn3CcQ-e!}SnO6e7
zx*PS5XV`89&|oumGe%F6M$XxW1Y1xjAv#D-k_N>tq;!Gz_8^fBI^qx1O2Bf)JE({q
z*(bE4p9zO~k}l?K*Z`b<jCm-p5$0jR*v?4C+@nO~q1>3q6B}cO1aaprV>w$J%X!#X
zPG!b&`1ODs)C}3o1kO!{*oH2_i}OQ*Aqy`FoOlaWh3CXum<r6!AF4fuE~Sw3@={$>
zGD~1L9HFSe+KE9^f$PNE5a_rX9v?$h5b!az&I*Rwh9~6UHhJciq!#5R=0N8)ii+}!
zAijo~fh%lbCg75X>c`QEM70B3w;7@WY71Udpla~MAk-~}mN;V&ZU!zD__hNPb1E)$
z5`^?AXM*SF!c0b)76w&uC=)3-Gb4(}@TX4{rC7#fz)^;x2w%LRD8wB#Xv(k*Be*hv
z4%Y%7q6V5;9+16Ha2H@sgj>MmP^QA6Lm=RZdyL3Jorp%9RSci%HjmE-Z&w2k>7c|j
zs#ar|-SOqAiP_+zP2kSKj4iZ9yNE-Tb5m0?OLNKAUR+X?pO;Rec5}GXi=e|sBpQHl
zesWQ2VhQv{KA5RQ&)^zb!nEK${23msq~~yiL#cOoHNrV4#R6#gHaN)*NG1W>j+sHg
zyD5+}2Ur~_D-*~VkQ27ynF79qlVGj@wevy8kd&ns6+_Ym*bT0v@A-q=Opmy|7h(<p
z2UvpbhhN%DlpaHfXN$<Vte2Rh#R<3=Vg!l1_8^)_+^z@FMB+X@h$d1t=s|Rhgq?Yi
z)CC#?FU!w{Y&w8$4Ipn3FJem`7L!R?R14cd1lr>SF$#5`0?GTnpt~UuoA98f60!RZ
z(=8xZAx8X>R-RMKS@<o$$d8yV1TD0MwF5|Y6tZy$Z-U$e2{bBr7~A#AxGlixVekes
zsyPahIPjQ>;STKkxga~npw6PfUNkHwlajZvI~uY_42#he7YH~_Ce6{vD|!u$!L=l8
zu^71V2XQ)<C_>R_2G*FIpPH7InVgxL2e|^7IK7bi9K5(4zm52}VnHmUUZD#u+Ms)+
zu$WF-I1}f1#4a4vf|<MxWH3j=O+_xhaO{RbcpGJl4yY7C6k6y-6_(^dlz(AC2(pTb
z9!4^j2p?l+Txb|TH!#6Mk;)r?aGMTuK6#A=?2d;g0Z>GuZUaJe%WxzKVgm?n2lWCS
z78IZa0@~q%v?d=tbV48tg;0CuI8q^M*rQp3HJOtdG|+8bG)g<TO^5jfJ#>Q2NeCY3
zMlKpT9=GYFIDT}0*y#SSoK!5AtkQZ@7v`K5w&^0w`7dmjWn%79!*+ct<|a5SS88Ip
zWEI=>sl?qRYJ#OgM``a7zL^bNc#u9Y2N`l9cApnyNCE$DA)Fh$ATC51tAn+C2X4F$
z?gGp)J9xc>JZ=Y9PsG?A;<`EH@jJM&1P7<hVY7Is<9G-INFU2Hfg6b!&qJ6;C>0vR
zb;Aeth}Vn=4RQwfh`xxAkROcTejstE56J~2j`tzyByrFWNhc{Ie@NP}4*%hAvmr*m
z5Rps0ZXQ9?2{lR}t6C`71cB^(>|;czT_Y+DJRvvINj4JN$-p+=gkdg54uE+QJS2n^
zr-+USx$Z$4YQk+UhI=skH=slT8E!%{h6Y1UI7}y$<d9PgcE>{ohj18ArJ*MrrjzD)
zlvV|zJkLwag-jA5EWr|OD4G%Fd2woqAEtizNpr*-l$@Vel3D>7%Oc((h^t_WQ%k^i
zLt+>~v|AAMBVv3EuS;-rc%h?M2(8pBKoLa&Xc-?48%RqO#Crlh<Uq|~4J?6$um`m)
zCU0C0DUe_RL*)SuOfMi?M3En$V<$*{pjyEQO-lG}fO&+X5e_U?AfgyPsf{$uN$U6y
zUJIxk{;&|CavH+)0<uLE`2jk9MZ+|N-v*dRC`v=naV#2o0>2HUcw%(4Y~+oWVGbJ^
zVUGR~x!xLcpo+-#)|d;mO^F*o!?Y305^Ph<Mc<~Dn0H!Wu1`0E-mZ@O5?4ek4s_;w
nd~r^GNipUK2|V+YKGKC~uM#`jh188EbI1$fT$Eui1~32sTe$#)

literal 0
HcmV?d00001

diff --git a/venv/lib/python3.7/site-packages/matplotlib-3.4.2-py3.7-nspkg.pth b/venv/lib/python3.7/site-packages/matplotlib-3.4.2-py3.7-nspkg.pth
new file mode 100644
index 00000000..2137841f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/matplotlib-3.4.2-py3.7-nspkg.pth
@@ -0,0 +1 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('mpl_toolkits',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('mpl_toolkits', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('mpl_toolkits', [os.path.dirname(p)])));m = m or sys.modules.setdefault('mpl_toolkits', types.ModuleType('mpl_toolkits'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
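+# The line above is setuptools-generated namespace-package bootstrap code,
+# squeezed onto one line because site.py exec()s each "import" line of a
+# .pth file as a single unit (lines starting with "#" are skipped). A
+# readable sketch of what it does, assuming Python >= 3.5 ('sitedir' is the
+# variable site.py provides when executing the line):
+#
+#   import sys, types, os, importlib.util, importlib.machinery
+#   p = os.path.join(sitedir, 'mpl_toolkits')
+#   spec = importlib.machinery.PathFinder.find_spec('mpl_toolkits', [os.path.dirname(p)])
+#   m = sys.modules.setdefault('mpl_toolkits', importlib.util.module_from_spec(spec))
+#   m = m or sys.modules.setdefault('mpl_toolkits', types.ModuleType('mpl_toolkits'))
+#   mp = m.__dict__.setdefault('__path__', [])
+#   if p not in mp:
+#       mp.append(p)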
diff --git a/venv/lib/python3.7/site-packages/protobuf-3.17.0-py3.7-nspkg.pth b/venv/lib/python3.7/site-packages/protobuf-3.17.0-py3.7-nspkg.pth
new file mode 100644
index 00000000..baef7a0f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/protobuf-3.17.0-py3.7-nspkg.pth
@@ -0,0 +1 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
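+# Same nspkg bootstrap pattern as the matplotlib .pth above, here targeting
+# the 'google' namespace package used by protobuf.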
diff --git a/venv/lib/python3.7/site-packages/pylab.py b/venv/lib/python3.7/site-packages/pylab.py
new file mode 100644
index 00000000..f9d135d3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pylab.py
@@ -0,0 +1,3 @@
+from matplotlib.pylab import *
+import matplotlib.pylab
+__doc__ = matplotlib.pylab.__doc__
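+# pylab is a thin compatibility shim: it re-exports the matplotlib.pylab
+# namespace so that "import pylab" / "from pylab import *" keep working.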
diff --git a/venv/lib/python3.7/site-packages/pyparsing.py b/venv/lib/python3.7/site-packages/pyparsing.py
new file mode 100644
index 00000000..581d5bbb
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pyparsing.py
@@ -0,0 +1,7107 @@
+# -*- coding: utf-8 -*-
+# module pyparsing.py
+#
+# Copyright (c) 2003-2019  Paul T. McGuire
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = \
+"""
+pyparsing module - Classes and methods to define and execute parsing grammars
+=============================================================================
+
+The pyparsing module is an alternative approach to creating and
+executing simple grammars, vs. the traditional lex/yacc approach, or the
+use of regular expressions.  With pyparsing, you don't need to learn
+a new syntax for defining grammars or matching expressions - the parsing
+module provides a library of classes that you use to construct the
+grammar directly in Python.
+
+Here is a program to parse "Hello, World!" (or any greeting of the form
+``"<salutation>, <addressee>!"``), built up using :class:`Word`,
+:class:`Literal`, and :class:`And` elements
+(the :class:`'+'<ParserElement.__add__>` operators create :class:`And` expressions,
+and the strings are auto-converted to :class:`Literal` expressions)::
+
+    from pyparsing import Word, alphas
+
+    # define grammar of a greeting
+    greet = Word(alphas) + "," + Word(alphas) + "!"
+
+    hello = "Hello, World!"
+    print (hello, "->", greet.parseString(hello))
+
+The program outputs the following::
+
+    Hello, World! -> ['Hello', ',', 'World', '!']
+
+The Python representation of the grammar is quite readable, owing to the
+self-explanatory class names, and the use of '+', '|' and '^' operators.
+
+The :class:`ParseResults` object returned from
+:class:`ParserElement.parseString` can be
+accessed as a nested list, a dictionary, or an object with named
+attributes.
+
+The pyparsing module handles some of the problems that are typically
+vexing when writing text parsers:
+
+  - extra or missing whitespace (the above program will also handle
+    "Hello,World!", "Hello  ,  World  !", etc.)
+  - quoted strings
+  - embedded comments
+
+
+Getting Started -
+-----------------
+Visit the classes :class:`ParserElement` and :class:`ParseResults` to
+see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+
+ - construct literal match expressions from :class:`Literal` and
+   :class:`CaselessLiteral` classes
+ - construct character word-group expressions using the :class:`Word`
+   class
+ - see how to create repetitive expressions using :class:`ZeroOrMore`
+   and :class:`OneOrMore` classes
+ - use :class:`'+'<And>`, :class:`'|'<MatchFirst>`, :class:`'^'<Or>`,
+   and :class:`'&'<Each>` operators to combine simple expressions into
+   more complex ones
+ - associate names with your parsed results using
+   :class:`ParserElement.setResultsName`
+ - access the parsed data, which is returned as a :class:`ParseResults`
+   object
+ - find some helpful expression short-cuts like :class:`delimitedList`
+   and :class:`oneOf`
+ - find more useful common expressions in the :class:`pyparsing_common`
+   namespace class
+"""
+
+__version__ = "2.4.7"
+__versionTime__ = "30 Mar 2020 00:43 UTC"
+__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
+
+import string
+from weakref import ref as wkref
+import copy
+import sys
+import warnings
+import re
+import sre_constants
+import collections
+import pprint
+import traceback
+import types
+from datetime import datetime
+from operator import itemgetter
+import itertools
+from functools import wraps
+from contextlib import contextmanager
+
+try:
+    # Python 3
+    from itertools import filterfalse
+except ImportError:
+    from itertools import ifilterfalse as filterfalse
+
+try:
+    from _thread import RLock
+except ImportError:
+    from threading import RLock
+
+try:
+    # Python 3
+    from collections.abc import Iterable
+    from collections.abc import MutableMapping, Mapping
+except ImportError:
+    # Python 2.7
+    from collections import Iterable
+    from collections import MutableMapping, Mapping
+
+try:
+    from collections import OrderedDict as _OrderedDict
+except ImportError:
+    try:
+        from ordereddict import OrderedDict as _OrderedDict
+    except ImportError:
+        _OrderedDict = None
+
+try:
+    from types import SimpleNamespace
+except ImportError:
+    class SimpleNamespace: pass
+
+# version compatibility configuration
+__compat__ = SimpleNamespace()
+__compat__.__doc__ = """
+    A cross-version compatibility configuration for pyparsing features that will be
+    released in a future version. By setting values in this configuration to True,
+    those features can be enabled in prior versions for compatibility development
+    and testing.
+
+     - collect_all_And_tokens - flag to enable fix for Issue #63 that fixes erroneous grouping
+       of results names when an And expression is nested within an Or or MatchFirst; set to
+       True to enable bugfix released in pyparsing 2.3.0, or False to preserve
+       pre-2.3.0 handling of named results
+"""
+__compat__.collect_all_And_tokens = True
+
+__diag__ = SimpleNamespace()
+__diag__.__doc__ = """
+Diagnostic configuration (all default to False)
+     - warn_multiple_tokens_in_named_alternation - flag to enable warnings when a results
+       name is defined on a MatchFirst or Or expression with one or more And subexpressions
+       (only warns if __compat__.collect_all_And_tokens is False)
+     - warn_ungrouped_named_tokens_in_collection - flag to enable warnings when a results
+       name is defined on a containing expression with ungrouped subexpressions that also
+       have results names
+     - warn_name_set_on_empty_Forward - flag to enable warnings when a Forward is defined
+       with a results name, but has no contents defined
+     - warn_on_multiple_string_args_to_oneof - flag to enable warnings when oneOf is
+       incorrectly called with multiple str arguments
+     - enable_debug_on_named_expressions - flag to auto-enable debug on all subsequent
+       calls to ParserElement.setName()
+"""
+__diag__.warn_multiple_tokens_in_named_alternation = False
+__diag__.warn_ungrouped_named_tokens_in_collection = False
+__diag__.warn_name_set_on_empty_Forward = False
+__diag__.warn_on_multiple_string_args_to_oneof = False
+__diag__.enable_debug_on_named_expressions = False
+__diag__._all_names = [nm for nm in vars(__diag__) if nm.startswith("enable_") or nm.startswith("warn_")]
+
+def _enable_all_warnings():
+    __diag__.warn_multiple_tokens_in_named_alternation = True
+    __diag__.warn_ungrouped_named_tokens_in_collection = True
+    __diag__.warn_name_set_on_empty_Forward = True
+    __diag__.warn_on_multiple_string_args_to_oneof = True
+__diag__.enable_all_warnings = _enable_all_warnings
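+
+# Example (illustrative sketch): the flags above are plain attributes, so a
+# caller can enable a single diagnostic, or all warn_* flags at once:
+#
+#   import pyparsing as pp
+#   pp.__diag__.warn_name_set_on_empty_Forward = True   # one flag
+#   pp.__diag__.enable_all_warnings()                   # all warn_* flags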
+
+
+__all__ = ['__version__', '__versionTime__', '__author__', '__compat__', '__diag__',
+           'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
+           'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
+           'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
+           'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
+           'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
+           'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter',
+           'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char',
+           'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
+           'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
+           'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
+           'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
+           'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
+           'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
+           'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity',
+           'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
+           'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
+           'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation', 'locatedExpr', 'withClass',
+           'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set',
+           'conditionAsParseAction', 're',
+           ]
+
+system_version = tuple(sys.version_info)[:3]
+PY_3 = system_version[0] == 3
+if PY_3:
+    _MAX_INT = sys.maxsize
+    basestring = str
+    unichr = chr
+    unicode = str
+    _ustr = str
+
+    # build list of single arg builtins, that can be used as parse actions
+    singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
+
+else:
+    _MAX_INT = sys.maxint
+    range = xrange
+
+    def _ustr(obj):
+        """Drop-in replacement for str(obj) that tries to be Unicode
+        friendly. It first tries str(obj). If that fails with
+        a UnicodeEncodeError, then it tries unicode(obj). It then
+        < returns the unicode object | encodes it with the default
+        encoding | ... >.
+        """
+        if isinstance(obj, unicode):
+            return obj
+
+        try:
+            # If this works, then _ustr(obj) has the same behaviour as str(obj), so
+            # it won't break any existing code.
+            return str(obj)
+
+        except UnicodeEncodeError:
+            # Else encode it
+            ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
+            xmlcharref = Regex(r'&#\d+;')
+            xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
+            return xmlcharref.transformString(ret)
+
+    # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
+    singleArgBuiltins = []
+    import __builtin__
+
+    for fname in "sum len sorted reversed list tuple set any all min max".split():
+        try:
+            singleArgBuiltins.append(getattr(__builtin__, fname))
+        except AttributeError:
+            continue
+
+_generatorType = type((y for y in range(1)))
+
+def _xml_escape(data):
+    """Escape &, <, >, ", ', etc. in a string of data."""
+
+    # ampersand must be replaced first
+    from_symbols = '&><"\''
+    to_symbols = ('&' + s + ';' for s in "amp gt lt quot apos".split())
+    for from_, to_ in zip(from_symbols, to_symbols):
+        data = data.replace(from_, to_)
+    return data
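+# e.g. (illustrative): _xml_escape('a < b & c') -> 'a &lt; b &amp; c'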
+
+alphas = string.ascii_uppercase + string.ascii_lowercase
+nums = "0123456789"
+hexnums = nums + "ABCDEFabcdef"
+alphanums = alphas + nums
+_bslash = chr(92)
+printables = "".join(c for c in string.printable if c not in string.whitespace)
+
+
+def conditionAsParseAction(fn, message=None, fatal=False):
+    msg = message if message is not None else "failed user-defined condition"
+    exc_type = ParseFatalException if fatal else ParseException
+    fn = _trim_arity(fn)
+
+    @wraps(fn)
+    def pa(s, l, t):
+        if not bool(fn(s, l, t)):
+            raise exc_type(s, l, msg)
+
+    return pa
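+
+# Example (illustrative sketch): wrap a boolean check on parsed tokens as a
+# parse action that raises ParseException when the check fails:
+#
+#   year = Word(nums).addParseAction(
+#       conditionAsParseAction(lambda t: 1900 <= int(t[0]) <= 2100,
+#                              message="year out of range"))
+#   year.parseString("1999")    # -> ['1999']
+#   year.parseString("1492")    # raises ParseException: year out of range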
+
+class ParseBaseException(Exception):
+    """base exception class for all parsing runtime exceptions"""
+    # Performance tuning: we construct a *lot* of these, so keep this
+    # constructor as small and fast as possible
+    def __init__(self, pstr, loc=0, msg=None, elem=None):
+        self.loc = loc
+        if msg is None:
+            self.msg = pstr
+            self.pstr = ""
+        else:
+            self.msg = msg
+            self.pstr = pstr
+        self.parserElement = elem
+        self.args = (pstr, loc, msg)
+
+    @classmethod
+    def _from_exception(cls, pe):
+        """
+        internal factory method to simplify creating one type of ParseException
+        from another - avoids having __init__ signature conflicts among subclasses
+        """
+        return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
+
+    def __getattr__(self, aname):
+        """supported attributes by name are:
+           - lineno - returns the line number of the exception text
+           - col - returns the column number of the exception text
+           - line - returns the line containing the exception text
+        """
+        if aname == "lineno":
+            return lineno(self.loc, self.pstr)
+        elif aname in ("col", "column"):
+            return col(self.loc, self.pstr)
+        elif aname == "line":
+            return line(self.loc, self.pstr)
+        else:
+            raise AttributeError(aname)
+
+    def __str__(self):
+        if self.pstr:
+            if self.loc >= len(self.pstr):
+                foundstr = ', found end of text'
+            else:
+                foundstr = (', found %r' % self.pstr[self.loc:self.loc + 1]).replace(r'\\', '\\')
+        else:
+            foundstr = ''
+        return ("%s%s  (at char %d), (line:%d, col:%d)" %
+                   (self.msg, foundstr, self.loc, self.lineno, self.column))
+    def __repr__(self):
+        return _ustr(self)
+    def markInputline(self, markerString=">!<"):
+        """Extracts the exception line from the input string, and marks
+           the location of the exception with a special symbol.
+        """
+        line_str = self.line
+        line_column = self.column - 1
+        if markerString:
+            line_str = "".join((line_str[:line_column],
+                                markerString, line_str[line_column:]))
+        return line_str.strip()
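+        # e.g. for an error at column 3 of "12a45", markInputline() -> '12>!<a45'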
+    def __dir__(self):
+        return "lineno col line".split() + dir(type(self))
+
+class ParseException(ParseBaseException):
+    """
+    Exception thrown when parse expressions don't match the input string;
+    supported attributes by name are:
+    - lineno - returns the line number of the exception text
+    - col - returns the column number of the exception text
+    - line - returns the line containing the exception text
+
+    Example::
+
+        try:
+            Word(nums).setName("integer").parseString("ABC")
+        except ParseException as pe:
+            print(pe)
+            print("column: {}".format(pe.col))
+
+    prints::
+
+        Expected integer (at char 0), (line:1, col:1)
+        column: 1
+
+    """
+
+    @staticmethod
+    def explain(exc, depth=16):
+        """
+        Method to take an exception and translate the Python internal traceback into a list
+        of the pyparsing expressions that caused the exception to be raised.
+
+        Parameters:
+
+         - exc - exception raised during parsing (need not be a ParseException, in support
+           of Python exceptions that might be raised in a parse action)
+         - depth (default=16) - number of levels back in the stack trace to list expression
+           and function names; if None, the full stack trace names will be listed; if 0, only
+           the failing input line, marker, and exception string will be shown
+
+        Returns a multi-line string listing the ParserElements and/or function names in the
+        exception's stack trace.
+
+        Note: the diagnostic output will include string representations of the expressions
+        that failed to parse. These representations will be more helpful if you use `setName` to
+        give identifiable names to your expressions. Otherwise they will use the default string
+        forms, which may be cryptic to read.
+
+        explain() is only supported under Python 3.
+        """
+        import inspect
+
+        if depth is None:
+            depth = sys.getrecursionlimit()
+        ret = []
+        if isinstance(exc, ParseBaseException):
+            ret.append(exc.line)
+            ret.append(' ' * (exc.col - 1) + '^')
+        ret.append("{0}: {1}".format(type(exc).__name__, exc))
+
+        if depth > 0:
+            callers = inspect.getinnerframes(exc.__traceback__, context=depth)
+            seen = set()
+            for i, ff in enumerate(callers[-depth:]):
+                frm = ff[0]
+
+                f_self = frm.f_locals.get('self', None)
+                if isinstance(f_self, ParserElement):
+                    if frm.f_code.co_name not in ('parseImpl', '_parseNoCache'):
+                        continue
+                    if f_self in seen:
+                        continue
+                    seen.add(f_self)
+
+                    self_type = type(f_self)
+                    ret.append("{0}.{1} - {2}".format(self_type.__module__,
+                                                      self_type.__name__,
+                                                      f_self))
+                elif f_self is not None:
+                    self_type = type(f_self)
+                    ret.append("{0}.{1}".format(self_type.__module__,
+                                                self_type.__name__))
+                else:
+                    code = frm.f_code
+                    if code.co_name in ('wrapper', '<module>'):
+                        continue
+
+                    ret.append("{0}".format(code.co_name))
+
+                depth -= 1
+                if not depth:
+                    break
+
+        return '\n'.join(ret)
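+
+    # Example (illustrative sketch):
+    #
+    #   try:
+    #       Word(nums).setName("integer").parseString("ABC")
+    #   except ParseException as pe:
+    #       print(ParseException.explain(pe, depth=4))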
+
+
+class ParseFatalException(ParseBaseException):
+    """user-throwable exception thrown when inconsistent parse content
+       is found; stops all parsing immediately"""
+    pass
+
+class ParseSyntaxException(ParseFatalException):
+    """just like :class:`ParseFatalException`, but thrown internally
+    when an :class:`ErrorStop<And._ErrorStop>` ('-' operator) indicates
+    that parsing is to stop immediately because an unbacktrackable
+    syntax error has been found.
+    """
+    pass
+
+#~ class ReparseException(ParseBaseException):
+    #~ """Experimental class - parse actions can raise this exception to cause
+       #~ pyparsing to reparse the input string:
+        #~ - with a modified input string, and/or
+        #~ - with a modified start location
+       #~ Set the values of the ReparseException in the constructor, and raise the
+       #~ exception in a parse action to cause pyparsing to use the new string/location.
+       #~ Setting the values as None causes no change to be made.
+       #~ """
+    #~ def __init_( self, newstring, restartLoc ):
+        #~ self.newParseText = newstring
+        #~ self.reparseLoc = restartLoc
+
+class RecursiveGrammarException(Exception):
+    """exception thrown by :class:`ParserElement.validate` if the
+    grammar could be improperly recursive
+    """
+    def __init__(self, parseElementList):
+        self.parseElementTrace = parseElementList
+
+    def __str__(self):
+        return "RecursiveGrammarException: %s" % self.parseElementTrace
+
+class _ParseResultsWithOffset(object):
+    def __init__(self, p1, p2):
+        self.tup = (p1, p2)
+    def __getitem__(self, i):
+        return self.tup[i]
+    def __repr__(self):
+        return repr(self.tup[0])
+    def setOffset(self, i):
+        self.tup = (self.tup[0], i)
+
+class ParseResults(object):
+    """Structured parse results, to provide multiple means of access to
+    the parsed data:
+
+       - as a list (``len(results)``)
+       - by list index (``results[0], results[1]``, etc.)
+       - by attribute (``results.<resultsName>`` - see :class:`ParserElement.setResultsName`)
+
+    Example::
+
+        integer = Word(nums)
+        date_str = (integer.setResultsName("year") + '/'
+                        + integer.setResultsName("month") + '/'
+                        + integer.setResultsName("day"))
+        # equivalent form:
+        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+        # parseString returns a ParseResults object
+        result = date_str.parseString("1999/12/31")
+
+        def test(s, fn=repr):
+            print("%s -> %s" % (s, fn(eval(s))))
+        test("list(result)")
+        test("result[0]")
+        test("result['month']")
+        test("result.day")
+        test("'month' in result")
+        test("'minutes' in result")
+        test("result.dump()", str)
+
+    prints::
+
+        list(result) -> ['1999', '/', '12', '/', '31']
+        result[0] -> '1999'
+        result['month'] -> '12'
+        result.day -> '31'
+        'month' in result -> True
+        'minutes' in result -> False
+        result.dump() -> ['1999', '/', '12', '/', '31']
+        - day: 31
+        - month: 12
+        - year: 1999
+    """
+    def __new__(cls, toklist=None, name=None, asList=True, modal=True):
+        if isinstance(toklist, cls):
+            return toklist
+        retobj = object.__new__(cls)
+        retobj.__doinit = True
+        return retobj
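+
+    # Note: if an existing ParseResults is passed in, __new__ returns it as-is
+    # (its __doinit flag is already False), so the __init__ below skips
+    # rebuilding the token list; ParseResults(existing) is therefore cheap.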
+
+    # Performance tuning: we construct a *lot* of these, so keep this
+    # constructor as small and fast as possible
+    def __init__(self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance):
+        if self.__doinit:
+            self.__doinit = False
+            self.__name = None
+            self.__parent = None
+            self.__accumNames = {}
+            self.__asList = asList
+            self.__modal = modal
+            if toklist is None:
+                toklist = []
+            if isinstance(toklist, list):
+                self.__toklist = toklist[:]
+            elif isinstance(toklist, _generatorType):
+                self.__toklist = list(toklist)
+            else:
+                self.__toklist = [toklist]
+            self.__tokdict = dict()
+
+        if name is not None and name:
+            if not modal:
+                self.__accumNames[name] = 0
+            if isinstance(name, int):
+                name = _ustr(name)  # will always return a str, but use _ustr for consistency
+            self.__name = name
+            if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None, '', [])):
+                if isinstance(toklist, basestring):
+                    toklist = [toklist]
+                if asList:
+                    if isinstance(toklist, ParseResults):
+                        self[name] = _ParseResultsWithOffset(ParseResults(toklist.__toklist), 0)
+                    else:
+                        self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]), 0)
+                    self[name].__name = name
+                else:
+                    try:
+                        self[name] = toklist[0]
+                    except (KeyError, TypeError, IndexError):
+                        self[name] = toklist
+
+    def __getitem__(self, i):
+        if isinstance(i, (int, slice)):
+            return self.__toklist[i]
+        else:
+            if i not in self.__accumNames:
+                return self.__tokdict[i][-1][0]
+            else:
+                return ParseResults([v[0] for v in self.__tokdict[i]])
+
+    def __setitem__(self, k, v, isinstance=isinstance):
+        if isinstance(v, _ParseResultsWithOffset):
+            self.__tokdict[k] = self.__tokdict.get(k, list()) + [v]
+            sub = v[0]
+        elif isinstance(k, (int, slice)):
+            self.__toklist[k] = v
+            sub = v
+        else:
+            self.__tokdict[k] = self.__tokdict.get(k, list()) + [_ParseResultsWithOffset(v, 0)]
+            sub = v
+        if isinstance(sub, ParseResults):
+            sub.__parent = wkref(self)
+
+    def __delitem__(self, i):
+        if isinstance(i, (int, slice)):
+            mylen = len(self.__toklist)
+            del self.__toklist[i]
+
+            # convert int to slice
+            if isinstance(i, int):
+                if i < 0:
+                    i += mylen
+                i = slice(i, i + 1)
+            # get removed indices
+            removed = list(range(*i.indices(mylen)))
+            removed.reverse()
+            # fixup indices in token dictionary
+            for name, occurrences in self.__tokdict.items():
+                for j in removed:
+                    for k, (value, position) in enumerate(occurrences):
+                        occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
+        else:
+            del self.__tokdict[i]
+
+    def __contains__(self, k):
+        return k in self.__tokdict
+
+    def __len__(self):
+        return len(self.__toklist)
+
+    def __bool__(self):
+        return (not not self.__toklist)
+    __nonzero__ = __bool__
+
+    def __iter__(self):
+        return iter(self.__toklist)
+
+    def __reversed__(self):
+        return iter(self.__toklist[::-1])
+
+    def _iterkeys(self):
+        if hasattr(self.__tokdict, "iterkeys"):
+            return self.__tokdict.iterkeys()
+        else:
+            return iter(self.__tokdict)
+
+    def _itervalues(self):
+        return (self[k] for k in self._iterkeys())
+
+    def _iteritems(self):
+        return ((k, self[k]) for k in self._iterkeys())
+
+    if PY_3:
+        keys = _iterkeys
+        """Returns an iterator of all named result keys."""
+
+        values = _itervalues
+        """Returns an iterator of all named result values."""
+
+        items = _iteritems
+        """Returns an iterator of all named result key-value tuples."""
+
+    else:
+        iterkeys = _iterkeys
+        """Returns an iterator of all named result keys (Python 2.x only)."""
+
+        itervalues = _itervalues
+        """Returns an iterator of all named result values (Python 2.x only)."""
+
+        iteritems = _iteritems
+        """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
+
+        def keys(self):
+            """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.iterkeys())
+
+        def values(self):
+            """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.itervalues())
+
+        def items(self):
+            """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.iteritems())
+
+    def haskeys(self):
+        """Since keys() returns an iterator, this method is helpful in bypassing
+           code that looks for the existence of any defined results names."""
+        return bool(self.__tokdict)
+
+    def pop(self, *args, **kwargs):
+        """
+        Removes and returns item at specified index (default= ``last``).
+        Supports both ``list`` and ``dict`` semantics for ``pop()``. If
+        passed no argument or an integer argument, it will use ``list``
+        semantics and pop tokens from the list of parsed tokens. If passed
+        a non-integer argument (most likely a string), it will use ``dict``
+        semantics and pop the corresponding value from any defined results
+        names. A second default return value argument is supported, just as in
+        ``dict.pop()``.
+
+        Example::
+
+            def remove_first(tokens):
+                tokens.pop(0)
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
+
+            label = Word(alphas)
+            patt = label("LABEL") + OneOrMore(Word(nums))
+            print(patt.parseString("AAB 123 321").dump())
+
+            # Use pop() in a parse action to remove named result (note that corresponding value is not
+            # removed from list form of results)
+            def remove_LABEL(tokens):
+                tokens.pop("LABEL")
+                return tokens
+            patt.addParseAction(remove_LABEL)
+            print(patt.parseString("AAB 123 321").dump())
+
+        prints::
+
+            ['AAB', '123', '321']
+            - LABEL: AAB
+
+            ['AAB', '123', '321']
+        """
+        if not args:
+            args = [-1]
+        for k, v in kwargs.items():
+            if k == 'default':
+                args = (args[0], v)
+            else:
+                raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
+        if (isinstance(args[0], int)
+                or len(args) == 1
+                or args[0] in self):
+            index = args[0]
+            ret = self[index]
+            del self[index]
+            return ret
+        else:
+            defaultvalue = args[1]
+            return defaultvalue
+
+    def get(self, key, defaultValue=None):
+        """
+        Returns named result matching the given key, or if there is no
+        such name, then returns the given ``defaultValue`` or ``None`` if no
+        ``defaultValue`` is specified.
+
+        Similar to ``dict.get()``.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            result = date_str.parseString("1999/12/31")
+            print(result.get("year")) # -> '1999'
+            print(result.get("hour", "not specified")) # -> 'not specified'
+            print(result.get("hour")) # -> None
+        """
+        if key in self:
+            return self[key]
+        else:
+            return defaultValue
+
+    def insert(self, index, insStr):
+        """
+        Inserts new element at location index in the list of parsed tokens.
+
+        Similar to ``list.insert()``.
+
+        Example::
+
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+            # use a parse action to insert the parse location in the front of the parsed results
+            def insert_locn(locn, tokens):
+                tokens.insert(0, locn)
+            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
+        """
+        self.__toklist.insert(index, insStr)
+        # fixup indices in token dictionary
+        for name, occurrences in self.__tokdict.items():
+            for k, (value, position) in enumerate(occurrences):
+                occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
+
+    def append(self, item):
+        """
+        Add single element to end of ParseResults list of elements.
+
+        Example::
+
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+            # use a parse action to compute the sum of the parsed integers, and add it to the end
+            def append_sum(tokens):
+                tokens.append(sum(map(int, tokens)))
+            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
+        """
+        self.__toklist.append(item)
+
+    def extend(self, itemseq):
+        """
+        Add sequence of elements to end of ParseResults list of elements.
+
+        Example::
+
+            patt = OneOrMore(Word(alphas))
+
+            # use a parse action to append the reverse of the matched strings, to make a palindrome
+            def make_palindrome(tokens):
+                tokens.extend(reversed([t[::-1] for t in tokens]))
+                return ''.join(tokens)
+            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+        """
+        if isinstance(itemseq, ParseResults):
+            self.__iadd__(itemseq)
+        else:
+            self.__toklist.extend(itemseq)
+
+    def clear(self):
+        """
+        Clear all elements and results names.
+        """
+        del self.__toklist[:]
+        self.__tokdict.clear()
+
+    def __getattr__(self, name):
+        try:
+            return self[name]
+        except KeyError:
+            return ""
+
+    def __add__(self, other):
+        ret = self.copy()
+        ret += other
+        return ret
+
+    def __iadd__(self, other):
+        if other.__tokdict:
+            offset = len(self.__toklist)
+            addoffset = lambda a: offset if a < 0 else a + offset
+            otheritems = other.__tokdict.items()
+            otherdictitems = [(k, _ParseResultsWithOffset(v[0], addoffset(v[1])))
+                              for k, vlist in otheritems for v in vlist]
+            for k, v in otherdictitems:
+                self[k] = v
+                if isinstance(v[0], ParseResults):
+                    v[0].__parent = wkref(self)
+
+        self.__toklist += other.__toklist
+        self.__accumNames.update(other.__accumNames)
+        return self
+
+    def __radd__(self, other):
+        if isinstance(other, int) and other == 0:
+            # useful for merging many ParseResults using sum() builtin
+            return self.copy()
+        else:
+            # this may raise a TypeError - so be it
+            return other + self
+
+    def __repr__(self):
+        return "(%s, %s)" % (repr(self.__toklist), repr(self.__tokdict))
+
+    def __str__(self):
+        return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
+
+    def _asStringList(self, sep=''):
+        out = []
+        for item in self.__toklist:
+            if out and sep:
+                out.append(sep)
+            if isinstance(item, ParseResults):
+                out += item._asStringList()
+            else:
+                out.append(_ustr(item))
+        return out
+
+    def asList(self):
+        """
+        Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+        Example::
+
+            patt = OneOrMore(Word(alphas))
+            result = patt.parseString("sldkj lsdkj sldkj")
+            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+
+            # Use asList() to create an actual list
+            result_list = result.asList()
+            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+        """
+        return [res.asList() if isinstance(res, ParseResults) else res for res in self.__toklist]
+
+    def asDict(self):
+        """
+        Returns the named parse results as a nested dictionary.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            result = date_str.parseString('12/31/1999')
+            print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+
+            result_dict = result.asDict()
+            print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
+
+            # even though a ParseResults supports dict-like access, sometimes you just need to have a dict
+            import json
+            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
+        """
+        if PY_3:
+            item_fn = self.items
+        else:
+            item_fn = self.iteritems
+
+        def toItem(obj):
+            if isinstance(obj, ParseResults):
+                if obj.haskeys():
+                    return obj.asDict()
+                else:
+                    return [toItem(v) for v in obj]
+            else:
+                return obj
+
+        return dict((k, toItem(v)) for k, v in item_fn())
+
+    def copy(self):
+        """
+        Returns a new copy of a :class:`ParseResults` object.
+        """
+        ret = ParseResults(self.__toklist)
+        ret.__tokdict = dict(self.__tokdict.items())
+        ret.__parent = self.__parent
+        ret.__accumNames.update(self.__accumNames)
+        ret.__name = self.__name
+        return ret
+
+    def asXML(self, doctag=None, namedItemsOnly=False, indent="", formatted=True):
+        """
+        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
+        """
+        nl = "\n"
+        out = []
+        namedItems = dict((v[1], k) for (k, vlist) in self.__tokdict.items()
+                          for v in vlist)
+        nextLevelIndent = indent + "  "
+
+        # collapse out indents if formatting is not desired
+        if not formatted:
+            indent = ""
+            nextLevelIndent = ""
+            nl = ""
+
+        selfTag = None
+        if doctag is not None:
+            selfTag = doctag
+        else:
+            if self.__name:
+                selfTag = self.__name
+
+        if not selfTag:
+            if namedItemsOnly:
+                return ""
+            else:
+                selfTag = "ITEM"
+
+        out += [nl, indent, "<", selfTag, ">"]
+
+        for i, res in enumerate(self.__toklist):
+            if isinstance(res, ParseResults):
+                if i in namedItems:
+                    out += [res.asXML(namedItems[i],
+                                      namedItemsOnly and doctag is None,
+                                      nextLevelIndent,
+                                      formatted)]
+                else:
+                    out += [res.asXML(None,
+                                      namedItemsOnly and doctag is None,
+                                      nextLevelIndent,
+                                      formatted)]
+            else:
+                # individual token, see if there is a name for it
+                resTag = None
+                if i in namedItems:
+                    resTag = namedItems[i]
+                if not resTag:
+                    if namedItemsOnly:
+                        continue
+                    else:
+                        resTag = "ITEM"
+                xmlBodyText = _xml_escape(_ustr(res))
+                out += [nl, nextLevelIndent, "<", resTag, ">",
+                        xmlBodyText,
+                        "</", resTag, ">"]
+
+        out += [nl, indent, "</", selfTag, ">"]
+        return "".join(out)
+
+    def __lookup(self, sub):
+        for k, vlist in self.__tokdict.items():
+            for v, loc in vlist:
+                if sub is v:
+                    return k
+        return None
+
+    def getName(self):
+        r"""
+        Returns the results name for this token expression. Useful when several
+        different expressions might match at a particular location.
+
+        Example::
+
+            integer = Word(nums)
+            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+            house_number_expr = Suppress('#') + Word(nums, alphanums)
+            user_data = (Group(house_number_expr)("house_number")
+                        | Group(ssn_expr)("ssn")
+                        | Group(integer)("age"))
+            user_info = OneOrMore(user_data)
+
+            result = user_info.parseString("22 111-22-3333 #221B")
+            for item in result:
+                print(item.getName(), ':', item[0])
+
+        prints::
+
+            age : 22
+            ssn : 111-22-3333
+            house_number : 221B
+        """
+        if self.__name:
+            return self.__name
+        elif self.__parent:
+            par = self.__parent()
+            if par:
+                return par.__lookup(self)
+            else:
+                return None
+        elif (len(self) == 1
+              and len(self.__tokdict) == 1
+              and next(iter(self.__tokdict.values()))[0][1] in (0, -1)):
+            return next(iter(self.__tokdict.keys()))
+        else:
+            return None
+
+    def dump(self, indent='', full=True, include_list=True, _depth=0):
+        """
+        Diagnostic method for listing out the contents of
+        a :class:`ParseResults`. Accepts an optional ``indent`` argument so
+        that this string can be embedded in a nested display of other data.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            result = date_str.parseString('12/31/1999')
+            print(result.dump())
+
+        prints::
+
+            ['12', '/', '31', '/', '1999']
+            - day: 1999
+            - month: 31
+            - year: 12
+        """
+        out = []
+        NL = '\n'
+        if include_list:
+            out.append(indent + _ustr(self.asList()))
+        else:
+            out.append('')
+
+        if full:
+            if self.haskeys():
+                items = sorted((str(k), v) for k, v in self.items())
+                for k, v in items:
+                    if out:
+                        out.append(NL)
+                    out.append("%s%s- %s: " % (indent, ('  ' * _depth), k))
+                    if isinstance(v, ParseResults):
+                        if v:
+                            out.append(v.dump(indent=indent, full=full, include_list=include_list, _depth=_depth + 1))
+                        else:
+                            out.append(_ustr(v))
+                    else:
+                        out.append(repr(v))
+            elif any(isinstance(vv, ParseResults) for vv in self):
+                v = self
+                for i, vv in enumerate(v):
+                    if isinstance(vv, ParseResults):
+                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,
+                                                            ('  ' * (_depth)),
+                                                            i,
+                                                            indent,
+                                                            ('  ' * (_depth + 1)),
+                                                            vv.dump(indent=indent,
+                                                                    full=full,
+                                                                    include_list=include_list,
+                                                                    _depth=_depth + 1)))
+                    else:
+                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,
+                                                            ('  ' * (_depth)),
+                                                            i,
+                                                            indent,
+                                                            ('  ' * (_depth + 1)),
+                                                            _ustr(vv)))
+
+        return "".join(out)
+
+    def pprint(self, *args, **kwargs):
+        """
+        Pretty-printer for parsed results as a list, using the
+        `pprint <https://docs.python.org/3/library/pprint.html>`_ module.
+        Accepts additional positional or keyword args as defined for
+        `pprint.pprint <https://docs.python.org/3/library/pprint.html#pprint.pprint>`_ .
+
+        Example::
+
+            ident = Word(alphas, alphanums)
+            num = Word(nums)
+            func = Forward()
+            term = ident | num | Group('(' + func + ')')
+            func <<= ident + Group(Optional(delimitedList(term)))
+            result = func.parseString("fna a,b,(fnb c,d,200),100")
+            result.pprint(width=40)
+
+        prints::
+
+            ['fna',
+             ['a',
+              'b',
+              ['(', 'fnb', ['c', 'd', '200'], ')'],
+              '100']]
+        """
+        pprint.pprint(self.asList(), *args, **kwargs)
+
+    # add support for pickle protocol
+    def __getstate__(self):
+        return (self.__toklist,
+                (self.__tokdict.copy(),
+                 self.__parent is not None and self.__parent() or None,
+                 self.__accumNames,
+                 self.__name))
+
+    def __setstate__(self, state):
+        self.__toklist = state[0]
+        self.__tokdict, par, inAccumNames, self.__name = state[1]
+        self.__accumNames = {}
+        self.__accumNames.update(inAccumNames)
+        if par is not None:
+            self.__parent = wkref(par)
+        else:
+            self.__parent = None
+
+    def __getnewargs__(self):
+        return self.__toklist, self.__name, self.__asList, self.__modal
+
+    def __dir__(self):
+        return dir(type(self)) + list(self.keys())
+
+    @classmethod
+    def from_dict(cls, other, name=None):
+        """
+        Helper classmethod to construct a ParseResults from a dict, preserving the
+        name-value relations as results names. If an optional 'name' argument is
+        given, a nested ParseResults will be returned
+        """
+        def is_iterable(obj):
+            try:
+                iter(obj)
+            except Exception:
+                return False
+            else:
+                if PY_3:
+                    return not isinstance(obj, (str, bytes))
+                else:
+                    return not isinstance(obj, basestring)
+
+        ret = cls([])
+        for k, v in other.items():
+            if isinstance(v, Mapping):
+                ret += cls.from_dict(v, name=k)
+            else:
+                ret += cls([v], name=k, asList=is_iterable(v))
+        if name is not None:
+            ret = cls([ret], name=name)
+        return ret
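+
+    # Example (illustrative sketch):
+    #   pr = ParseResults.from_dict({"name": "Bob", "scores": [1, 2]})
+    #   pr["name"]             # -> 'Bob'
+    #   pr["scores"].asList()  # -> [1, 2]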
+
+MutableMapping.register(ParseResults)
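+# Registering ParseResults makes it a "virtual" subclass of MutableMapping:
+# isinstance(ParseResults([]), MutableMapping) is True even though it does not
+# inherit from MutableMapping (the mapping methods are implemented directly).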
+
+def col(loc, strg):
+    """Returns current column within a string, counting newlines as line separators.
+    The first column is number 1.
+
+    Note: the default parsing behavior is to expand tabs in the input string
+    before starting the parsing process.  See
+    :class:`ParserElement.parseString` for more
+    information on parsing strings containing ``<TAB>`` s, and suggested
+    methods to maintain a consistent view of the parsed string, the parse
+    location, and line and column positions within the parsed string.
+    """
+    s = strg
+    return 1 if 0 < loc < len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
+
+def lineno(loc, strg):
+    """Returns current line number within a string, counting newlines as line separators.
+    The first line is number 1.
+
+    Note - the default parsing behavior is to expand tabs in the input string
+    before starting the parsing process.  See :class:`ParserElement.parseString`
+    for more information on parsing strings containing ``<TAB>`` s, and
+    suggested methods to maintain a consistent view of the parsed string, the
+    parse location, and line and column positions within the parsed string.
+    """
+    return strg.count("\n", 0, loc) + 1
+
+def line(loc, strg):
+    """Returns the line of text containing loc within a string, counting newlines as line separators.
+       """
+    lastCR = strg.rfind("\n", 0, loc)
+    nextCR = strg.find("\n", loc)
+    if nextCR >= 0:
+        return strg[lastCR + 1:nextCR]
+    else:
+        return strg[lastCR + 1:]
+
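+# Quick illustration of the three location helpers above (a sketch, not part
+# of the upstream source):
+#
+#   s = "abc\ndef"
+#   lineno(5, s)  # -> 2      ('e' sits on the second line)
+#   col(5, s)     # -> 2      ('e' is the second column of that line)
+#   line(5, s)    # -> 'def'
+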
+def _defaultStartDebugAction(instring, loc, expr):
+    print(("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % (lineno(loc, instring), col(loc, instring))))
+
+def _defaultSuccessDebugAction(instring, startloc, endloc, expr, toks):
+    print("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
+
+def _defaultExceptionDebugAction(instring, loc, expr, exc):
+    print("Exception raised:" + _ustr(exc))
+
+def nullDebugAction(*args):
+    """'Do-nothing' debug action, to suppress debugging output during parsing."""
+    pass
+
+# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
+#~ 'decorator to trim function calls to match the arity of the target'
+#~ def _trim_arity(func, maxargs=3):
+    #~ if func in singleArgBuiltins:
+        #~ return lambda s,l,t: func(t)
+    #~ limit = 0
+    #~ foundArity = False
+    #~ def wrapper(*args):
+        #~ nonlocal limit,foundArity
+        #~ while 1:
+            #~ try:
+                #~ ret = func(*args[limit:])
+                #~ foundArity = True
+                #~ return ret
+            #~ except TypeError:
+                #~ if limit == maxargs or foundArity:
+                    #~ raise
+                #~ limit += 1
+                #~ continue
+    #~ return wrapper
+
+# this version is Python 2.x-3.x cross-compatible
+def _trim_arity(func, maxargs=2):
+    """Decorator to trim function calls to match the arity of the target."""
+    if func in singleArgBuiltins:
+        return lambda s, l, t: func(t)
+    limit = [0]
+    foundArity = [False]
+
+    # traceback return data structure changed in Py3.5 - normalize back to plain tuples
+    if system_version[:2] >= (3, 5):
+        def extract_stack(limit=0):
+            # special handling for Python 3.5.0 - extra deep call stack by 1
+            offset = -3 if system_version == (3, 5, 0) else -2
+            frame_summary = traceback.extract_stack(limit=-offset + limit - 1)[offset]
+            return [frame_summary[:2]]
+        def extract_tb(tb, limit=0):
+            frames = traceback.extract_tb(tb, limit=limit)
+            frame_summary = frames[-1]
+            return [frame_summary[:2]]
+    else:
+        extract_stack = traceback.extract_stack
+        extract_tb = traceback.extract_tb
+
+    # synthesize what would be returned by traceback.extract_stack at the call to
+    # user's parse action 'func', so that we don't incur call penalty at parse time
+
+    LINE_DIFF = 6
+    # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND
+    # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
+    this_line = extract_stack(limit=2)[-1]
+    pa_call_line_synth = (this_line[0], this_line[1] + LINE_DIFF)
+
+    def wrapper(*args):
+        while 1:
+            try:
+                ret = func(*args[limit[0]:])
+                foundArity[0] = True
+                return ret
+            except TypeError:
+                # re-raise TypeErrors if they did not come from our arity testing
+                if foundArity[0]:
+                    raise
+                else:
+                    try:
+                        tb = sys.exc_info()[-1]
+                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
+                            raise
+                    finally:
+                        try:
+                            del tb
+                        except NameError:
+                            pass
+
+                if limit[0] <= maxargs:
+                    limit[0] += 1
+                    continue
+                raise
+
+    # copy func name to wrapper for sensible debug output
+    func_name = "<parse action>"
+    try:
+        func_name = getattr(func, '__name__',
+                            getattr(func, '__class__').__name__)
+    except Exception:
+        func_name = str(func)
+    wrapper.__name__ = func_name
+
+    return wrapper
+
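+# How _trim_arity behaves in practice (a sketch, not part of the upstream
+# source): parse actions may take 0-3 arguments, and the wrapper retries the
+# call with arguments trimmed from the left until the arity fits, so all of
+# the following are accepted by setParseAction:
+#
+#   lambda s, l, t: int(t[0])   # full (instring, loc, toks) signature
+#   lambda l, t: int(t[0])      # (loc, toks)
+#   lambda t: int(t[0])         # (toks) only
+#   lambda: None                # no arguments at all
+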
+
+class ParserElement(object):
+    """Abstract base level parser element class."""
+    DEFAULT_WHITE_CHARS = " \n\t\r"
+    verbose_stacktrace = False
+
+    @staticmethod
+    def setDefaultWhitespaceChars(chars):
+        r"""
+        Overrides the default whitespace chars
+
+        Example::
+
+            # default whitespace chars are space, <TAB> and newline
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
+
+            # change to just treat newline as significant
+            ParserElement.setDefaultWhitespaceChars(" \t")
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
+        """
+        ParserElement.DEFAULT_WHITE_CHARS = chars
+
+    @staticmethod
+    def inlineLiteralsUsing(cls):
+        """
+        Set class to be used for inclusion of string literals into a parser.
+
+        Example::
+
+            # default literal class used is Literal
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+
+            # change to Suppress
+            ParserElement.inlineLiteralsUsing(Suppress)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
+        """
+        ParserElement._literalStringClass = cls
+
+    @classmethod
+    def _trim_traceback(cls, tb):
+        while tb.tb_next:
+            tb = tb.tb_next
+        return tb
+
+    def __init__(self, savelist=False):
+        self.parseAction = list()
+        self.failAction = None
+        # ~ self.name = "<unknown>"  # don't define self.name, let subclasses try/except upcall
+        self.strRepr = None
+        self.resultsName = None
+        self.saveAsList = savelist
+        self.skipWhitespace = True
+        self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS)
+        self.copyDefaultWhiteChars = True
+        self.mayReturnEmpty = False # used when checking for left-recursion
+        self.keepTabs = False
+        self.ignoreExprs = list()
+        self.debug = False
+        self.streamlined = False
+        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
+        self.errmsg = ""
+        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
+        self.debugActions = (None, None, None)  # custom debug actions
+        self.re = None
+        self.callPreparse = True # used to avoid redundant calls to preParse
+        self.callDuringTry = False
+
+    def copy(self):
+        """
+        Make a copy of this :class:`ParserElement`.  Useful for defining
+        different parse actions for the same parsing pattern, using copies of
+        the original parse element.
+
+        Example::
+
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            integerK = integer.copy().addParseAction(lambda toks: toks[0] * 1024) + Suppress("K")
+            integerM = integer.copy().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M")
+
+            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
+
+        prints::
+
+            [5120, 100, 655360, 268435456]
+
+        Equivalent form of ``expr.copy()`` is just ``expr()``::
+
+            integerM = integer().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M")
+        """
+        cpy = copy.copy(self)
+        cpy.parseAction = self.parseAction[:]
+        cpy.ignoreExprs = self.ignoreExprs[:]
+        if self.copyDefaultWhiteChars:
+            cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+        return cpy
+
+    def setName(self, name):
+        """
+        Define name for this expression, makes debugging and exception messages clearer.
+
+        Example::
+
+            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
+            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
+        """
+        self.name = name
+        self.errmsg = "Expected " + self.name
+        if __diag__.enable_debug_on_named_expressions:
+            self.setDebug()
+        return self
+
+    def setResultsName(self, name, listAllMatches=False):
+        """
+        Define name for referencing matching tokens as a nested attribute
+        of the returned parse results.
+        NOTE: this returns a *copy* of the original :class:`ParserElement` object;
+        this is so that the client can define a basic element, such as an
+        integer, and reference it in multiple places with different names.
+
+        You can also set results names using the abbreviated syntax,
+        ``expr("name")`` in place of ``expr.setResultsName("name")``
+        - see :class:`__call__`.
+
+        Example::
+
+            date_str = (integer.setResultsName("year") + '/'
+                        + integer.setResultsName("month") + '/'
+                        + integer.setResultsName("day"))
+
+            # equivalent form:
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+        """
+        return self._setResultsName(name, listAllMatches)
+
+    def _setResultsName(self, name, listAllMatches=False):
+        newself = self.copy()
+        if name.endswith("*"):
+            name = name[:-1]
+            listAllMatches = True
+        newself.resultsName = name
+        newself.modalResults = not listAllMatches
+        return newself
+
+    def setBreak(self, breakFlag=True):
+        """Method to invoke the Python pdb debugger when this element is
+           about to be parsed. Set ``breakFlag`` to True to enable, False to
+           disable.
+        """
+        if breakFlag:
+            _parseMethod = self._parse
+            def breaker(instring, loc, doActions=True, callPreParse=True):
+                import pdb
+                # this call to pdb.set_trace() is intentional, not a checkin error
+                pdb.set_trace()
+                return _parseMethod(instring, loc, doActions, callPreParse)
+            breaker._originalParseMethod = _parseMethod
+            self._parse = breaker
+        else:
+            if hasattr(self._parse, "_originalParseMethod"):
+                self._parse = self._parse._originalParseMethod
+        return self
+
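+    # Illustrative use of setBreak (a sketch, not upstream code): wrap any
+    # element to drop into pdb just before it attempts a match, e.g.
+    #
+    #   Word(nums).setBreak().parseString("123")   # pdb opens at the match
+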
+    def setParseAction(self, *fns, **kwargs):
+        """
+        Define one or more actions to perform when successfully matching parse element definition.
+        Parse action fn is a callable method with 0-3 arguments, called as ``fn(s, loc, toks)`` ,
+        ``fn(loc, toks)`` , ``fn(toks)`` , or just ``fn()`` , where:
+
+        - s   = the original string being parsed (see note below)
+        - loc = the location of the matching substring
+        - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object
+
+        If the functions in fns modify the tokens, they can return them as the return
+        value from fn, and the modified list of tokens will replace the original.
+        Otherwise, fn does not need to return any value.
+
+        If None is passed as the parse action, all previously added parse actions for this
+        expression are cleared.
+
+        Optional keyword arguments:
+        - callDuringTry = (default= ``False``) indicate if parse action should be run during lookaheads and alternate testing
+
+        Note: the default parsing behavior is to expand tabs in the input string
+        before starting the parsing process.  See :class:`parseString` for more
+        information on parsing strings containing ``<TAB>`` s, and suggested
+        methods to maintain a consistent view of the parsed string, the parse
+        location, and line and column positions within the parsed string.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer + '/' + integer + '/' + integer
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+            # use parse action to convert to ints at parse time
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            date_str = integer + '/' + integer + '/' + integer
+
+            # note that integer fields are now ints, not strings
+            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
+        """
+        if list(fns) == [None,]:
+            self.parseAction = []
+        else:
+            if not all(callable(fn) for fn in fns):
+                raise TypeError("parse actions must be callable")
+            self.parseAction = list(map(_trim_arity, list(fns)))
+            self.callDuringTry = kwargs.get("callDuringTry", False)
+        return self
+
+    def addParseAction(self, *fns, **kwargs):
+        """
+        Add one or more parse actions to expression's list of parse actions. See :class:`setParseAction`.
+
+        See examples in :class:`copy`.
+        """
+        self.parseAction += list(map(_trim_arity, list(fns)))
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def addCondition(self, *fns, **kwargs):
+        """Add a boolean predicate function to expression's list of parse actions. See
+        :class:`setParseAction` for function call signatures. Unlike ``setParseAction``,
+        functions passed to ``addCondition`` need to return boolean success/fail of the condition.
+
+        Optional keyword arguments:
+        - message = define a custom message to be used in the raised exception
+        - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+
+        Example::
+
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            year_int = integer.copy()
+            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+            date_str = year_int + '/' + integer + '/' + integer
+
+            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+        """
+        for fn in fns:
+            self.parseAction.append(conditionAsParseAction(fn, message=kwargs.get('message'),
+                                                           fatal=kwargs.get('fatal', False)))
+
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def setFailAction(self, fn):
+        """Define action to perform if parsing fails at this expression.
+           Fail action fn is a callable function that takes the arguments
+           ``fn(s, loc, expr, err)`` where:
+           - s = string being parsed
+           - loc = location where expression match was attempted and failed
+           - expr = the parse expression that failed
+           - err = the exception thrown
+           The function returns no value.  It may throw :class:`ParseFatalException`
+           if it is desired to stop parsing immediately."""
+        self.failAction = fn
+        return self
+
+    def _skipIgnorables(self, instring, loc):
+        exprsFound = True
+        while exprsFound:
+            exprsFound = False
+            for e in self.ignoreExprs:
+                try:
+                    while 1:
+                        loc, dummy = e._parse(instring, loc)
+                        exprsFound = True
+                except ParseException:
+                    pass
+        return loc
+
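+    # preParse advances loc past any registered ignore expressions and any
+    # leading whitespace before parseImpl attempts the element's own match.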
+    def preParse(self, instring, loc):
+        if self.ignoreExprs:
+            loc = self._skipIgnorables(instring, loc)
+
+        if self.skipWhitespace:
+            wt = self.whiteChars
+            instrlen = len(instring)
+            while loc < instrlen and instring[loc] in wt:
+                loc += 1
+
+        return loc
+
+    def parseImpl(self, instring, loc, doActions=True):
+        return loc, []
+
+    def postParse(self, instring, loc, tokenlist):
+        return tokenlist
+
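+    # _parseNoCache is the core match sequence: optional preparse, parseImpl,
+    # postParse, then any parse actions, with debug/fail hooks around each step.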
+    # ~ @profile
+    def _parseNoCache(self, instring, loc, doActions=True, callPreParse=True):
+        TRY, MATCH, FAIL = 0, 1, 2
+        debugging = (self.debug)  # and doActions)
+
+        if debugging or self.failAction:
+            # ~ print ("Match", self, "at loc", loc, "(%d, %d)" % (lineno(loc, instring), col(loc, instring)))
+            if self.debugActions[TRY]:
+                self.debugActions[TRY](instring, loc, self)
+            try:
+                if callPreParse and self.callPreparse:
+                    preloc = self.preParse(instring, loc)
+                else:
+                    preloc = loc
+                tokensStart = preloc
+                if self.mayIndexError or preloc >= len(instring):
+                    try:
+                        loc, tokens = self.parseImpl(instring, preloc, doActions)
+                    except IndexError:
+                        raise ParseException(instring, len(instring), self.errmsg, self)
+                else:
+                    loc, tokens = self.parseImpl(instring, preloc, doActions)
+            except Exception as err:
+                # ~ print ("Exception raised:", err)
+                if self.debugActions[FAIL]:
+                    self.debugActions[FAIL](instring, tokensStart, self, err)
+                if self.failAction:
+                    self.failAction(instring, tokensStart, self, err)
+                raise
+        else:
+            if callPreParse and self.callPreparse:
+                preloc = self.preParse(instring, loc)
+            else:
+                preloc = loc
+            tokensStart = preloc
+            if self.mayIndexError or preloc >= len(instring):
+                try:
+                    loc, tokens = self.parseImpl(instring, preloc, doActions)
+                except IndexError:
+                    raise ParseException(instring, len(instring), self.errmsg, self)
+            else:
+                loc, tokens = self.parseImpl(instring, preloc, doActions)
+
+        tokens = self.postParse(instring, loc, tokens)
+
+        retTokens = ParseResults(tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults)
+        if self.parseAction and (doActions or self.callDuringTry):
+            if debugging:
+                try:
+                    for fn in self.parseAction:
+                        try:
+                            tokens = fn(instring, tokensStart, retTokens)
+                        except IndexError as parse_action_exc:
+                            exc = ParseException("exception raised in parse action")
+                            exc.__cause__ = parse_action_exc
+                            raise exc
+
+                        if tokens is not None and tokens is not retTokens:
+                            retTokens = ParseResults(tokens,
+                                                      self.resultsName,
+                                                      asList=self.saveAsList and isinstance(tokens, (ParseResults, list)),
+                                                      modal=self.modalResults)
+                except Exception as err:
+                    # ~ print "Exception raised in user parse action:", err
+                    if self.debugActions[FAIL]:
+                        self.debugActions[FAIL](instring, tokensStart, self, err)
+                    raise
+            else:
+                for fn in self.parseAction:
+                    try:
+                        tokens = fn(instring, tokensStart, retTokens)
+                    except IndexError as parse_action_exc:
+                        exc = ParseException("exception raised in parse action")
+                        exc.__cause__ = parse_action_exc
+                        raise exc
+
+                    if tokens is not None and tokens is not retTokens:
+                        retTokens = ParseResults(tokens,
+                                                  self.resultsName,
+                                                  asList=self.saveAsList and isinstance(tokens, (ParseResults, list)),
+                                                  modal=self.modalResults)
+        if debugging:
+            # ~ print ("Matched", self, "->", retTokens.asList())
+            if self.debugActions[MATCH]:
+                self.debugActions[MATCH](instring, tokensStart, loc, self, retTokens)
+
+        return loc, retTokens
+
+    def tryParse(self, instring, loc):
+        try:
+            return self._parse(instring, loc, doActions=False)[0]
+        except ParseFatalException:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+    def canParseNext(self, instring, loc):
+        try:
+            self.tryParse(instring, loc)
+        except (ParseException, IndexError):
+            return False
+        else:
+            return True
+
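+    # Illustrative use of the two probes above (a sketch, not upstream code):
+    # tryParse returns the post-match location without running parse actions,
+    # and canParseNext turns that into a simple boolean test:
+    #
+    #   Word(nums).canParseNext("abc123", 3)   # -> True  (digits begin at 3)
+    #   Word(nums).canParseNext("abc123", 0)   # -> False
+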
+    class _UnboundedCache(object):
+        def __init__(self):
+            cache = {}
+            self.not_in_cache = not_in_cache = object()
+
+            def get(self, key):
+                return cache.get(key, not_in_cache)
+
+            def set(self, key, value):
+                cache[key] = value
+
+            def clear(self):
+                cache.clear()
+
+            def cache_len(self):
+                return len(cache)
+
+            self.get = types.MethodType(get, self)
+            self.set = types.MethodType(set, self)
+            self.clear = types.MethodType(clear, self)
+            self.__len__ = types.MethodType(cache_len, self)
+
+    if _OrderedDict is not None:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = _OrderedDict()
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    while len(cache) > size:
+                        try:
+                            cache.popitem(False)
+                        except KeyError:
+                            pass
+
+                def clear(self):
+                    cache.clear()
+
+                def cache_len(self):
+                    return len(cache)
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+                self.__len__ = types.MethodType(cache_len, self)
+
+    else:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = {}
+                key_fifo = collections.deque([], size)
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    while len(key_fifo) > size:
+                        cache.pop(key_fifo.popleft(), None)
+                    key_fifo.append(key)
+
+                def clear(self):
+                    cache.clear()
+                    key_fifo.clear()
+
+                def cache_len(self):
+                    return len(cache)
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+                self.__len__ = types.MethodType(cache_len, self)
+
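+    # Both cache flavors expose the same minimal interface through closures
+    # bound with types.MethodType (illustrative sketch, not upstream code):
+    #
+    #   c = ParserElement._FifoCache(128)
+    #   c.set(key, value)
+    #   v = c.get(key)     # returns c.not_in_cache on a miss
+    #   c.clear()
+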
+    # argument cache for optimizing repeated calls when backtracking through recursive expressions
+    packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
+    packrat_cache_lock = RLock()
+    packrat_cache_stats = [0, 0]
+
+    # this method gets repeatedly called during backtracking with the same arguments -
+    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
+    def _parseCache(self, instring, loc, doActions=True, callPreParse=True):
+        HIT, MISS = 0, 1
+        lookup = (self, instring, loc, callPreParse, doActions)
+        with ParserElement.packrat_cache_lock:
+            cache = ParserElement.packrat_cache
+            value = cache.get(lookup)
+            if value is cache.not_in_cache:
+                ParserElement.packrat_cache_stats[MISS] += 1
+                try:
+                    value = self._parseNoCache(instring, loc, doActions, callPreParse)
+                except ParseBaseException as pe:
+                    # cache a copy of the exception, without the traceback
+                    cache.set(lookup, pe.__class__(*pe.args))
+                    raise
+                else:
+                    cache.set(lookup, (value[0], value[1].copy()))
+                    return value
+            else:
+                ParserElement.packrat_cache_stats[HIT] += 1
+                if isinstance(value, Exception):
+                    raise value
+                return value[0], value[1].copy()
+
+    _parse = _parseNoCache
+
+    @staticmethod
+    def resetCache():
+        ParserElement.packrat_cache.clear()
+        ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
+
+    _packratEnabled = False
+    @staticmethod
+    def enablePackrat(cache_size_limit=128):
+        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
+           Repeated parse attempts at the same string location (which happens
+           often in many complex grammars) can immediately return a cached value,
+           instead of re-executing parsing/validating code.  Memoizing is done of
+           both valid results and parsing exceptions.
+
+           Parameters:
+
+           - cache_size_limit - (default= ``128``) - if an integer value is provided,
+             it will limit the size of the packrat cache; if None is passed, then
+             the cache size will be unbounded; if 0 is passed, the cache will
+             be effectively disabled.
+
+           This speedup may break existing programs that use parse actions that
+           have side-effects.  For this reason, packrat parsing is disabled when
+           you first import pyparsing.  To activate the packrat feature, your
+           program must call the class method :class:`ParserElement.enablePackrat`.
+           For best results, call ``enablePackrat()`` immediately after
+           importing pyparsing.
+
+           Example::
+
+               import pyparsing
+               pyparsing.ParserElement.enablePackrat()
+        """
+        if not ParserElement._packratEnabled:
+            ParserElement._packratEnabled = True
+            if cache_size_limit is None:
+                ParserElement.packrat_cache = ParserElement._UnboundedCache()
+            else:
+                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+            ParserElement._parse = ParserElement._parseCache
+
+    def parseString(self, instring, parseAll=False):
+        """
+        Execute the parse expression with the given string.
+        This is the main interface to the client code, once the complete
+        expression has been built.
+
+        Returns the parsed data as a :class:`ParseResults` object, which may be
+        accessed as a list, or as a dict or object with attributes if the given parser
+        includes results names.
+
+        If you want the grammar to require that the entire input string be
+        successfully parsed, then set ``parseAll`` to True (equivalent to ending
+        the grammar with ``StringEnd()``).
+
+        Note: ``parseString`` implicitly calls ``expandtabs()`` on the input string,
+        in order to report proper column numbers in parse actions.
+        If the input string contains tabs and
+        the grammar uses parse actions that use the ``loc`` argument to index into the
+        string being parsed, you can ensure you have a consistent view of the input
+        string by:
+
+        - calling ``parseWithTabs`` on your grammar before calling ``parseString``
+          (see :class:`parseWithTabs`)
+        - define your parse action using the full ``(s, loc, toks)`` signature, and
+          reference the input string using the parse action's ``s`` argument
+        - explicitly expand the tabs in your input string before calling
+          ``parseString``
+
+        Example::
+
+            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
+            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
+        """
+        ParserElement.resetCache()
+        if not self.streamlined:
+            self.streamline()
+            # ~ self.saveAsList = True
+        for e in self.ignoreExprs:
+            e.streamline()
+        if not self.keepTabs:
+            instring = instring.expandtabs()
+        try:
+            loc, tokens = self._parse(instring, 0)
+            if parseAll:
+                loc = self.preParse(instring, loc)
+                se = Empty() + StringEnd()
+                se._parse(instring, loc)
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
+                if getattr(exc, '__traceback__', None) is not None:
+                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                raise exc
+        else:
+            return tokens
+
+    def scanString(self, instring, maxMatches=_MAX_INT, overlap=False):
+        """
+        Scan the input string for expression matches.  Each match will return the
+        matching tokens, start location, and end location.  May be called with optional
+        ``maxMatches`` argument, to clip scanning after 'n' matches are found.  If
+        ``overlap`` is specified, then overlapping matches will be reported.
+
+        Note that the start and end locations are reported relative to the string
+        being parsed.  See :class:`parseString` for more information on parsing
+        strings with embedded tabs.
+
+        Example::
+
+            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
+            print(source)
+            for tokens, start, end in Word(alphas).scanString(source):
+                print(' '*start + '^'*(end-start))
+                print(' '*start + tokens[0])
+
+        prints::
+
+            sldjf123lsdjjkf345sldkjf879lkjsfd987
+            ^^^^^
+            sldjf
+                    ^^^^^^^
+                    lsdjjkf
+                              ^^^^^^
+                              sldkjf
+                                       ^^^^^^
+                                       lkjsfd
+        """
+        if not self.streamlined:
+            self.streamline()
+        for e in self.ignoreExprs:
+            e.streamline()
+
+        if not self.keepTabs:
+            instring = _ustr(instring).expandtabs()
+        instrlen = len(instring)
+        loc = 0
+        preparseFn = self.preParse
+        parseFn = self._parse
+        ParserElement.resetCache()
+        matches = 0
+        try:
+            while loc <= instrlen and matches < maxMatches:
+                try:
+                    preloc = preparseFn(instring, loc)
+                    nextLoc, tokens = parseFn(instring, preloc, callPreParse=False)
+                except ParseException:
+                    loc = preloc + 1
+                else:
+                    if nextLoc > loc:
+                        matches += 1
+                        yield tokens, preloc, nextLoc
+                        if overlap:
+                            nextloc = preparseFn(instring, loc)
+                            if nextloc > loc:
+                                loc = nextLoc
+                            else:
+                                loc += 1
+                        else:
+                            loc = nextLoc
+                    else:
+                        loc = preloc + 1
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
+                if getattr(exc, '__traceback__', None) is not None:
+                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                raise exc
+
+    def transformString(self, instring):
+        """
+        Extension to :class:`scanString`, to modify matching text with modified tokens that may
+        be returned from a parse action.  To use ``transformString``, define a grammar and
+        attach a parse action to it that modifies the returned token list.
+        Invoking ``transformString()`` on a target string will then scan for matches,
+        and replace the matched text patterns according to the logic in the parse
+        action.  ``transformString()`` returns the resulting transformed string.
+
+        Example::
+
+            wd = Word(alphas)
+            wd.setParseAction(lambda toks: toks[0].title())
+
+            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
+
+        prints::
+
+            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
+        """
+        out = []
+        lastE = 0
+        # force preservation of <TAB>s, to minimize unwanted transformation of string, and to
+        # keep string locs straight between transformString and scanString
+        self.keepTabs = True
+        try:
+            for t, s, e in self.scanString(instring):
+                out.append(instring[lastE:s])
+                if t:
+                    if isinstance(t, ParseResults):
+                        out += t.asList()
+                    elif isinstance(t, list):
+                        out += t
+                    else:
+                        out.append(t)
+                lastE = e
+            out.append(instring[lastE:])
+            out = [o for o in out if o]
+            return "".join(map(_ustr, _flatten(out)))
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
+                if getattr(exc, '__traceback__', None) is not None:
+                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                raise exc
+
+    def searchString(self, instring, maxMatches=_MAX_INT):
+        """
+        Another extension to :class:`scanString`, simplifying the access to the tokens found
+        to match the given parse expression.  May be called with optional
+        ``maxMatches`` argument, to clip searching after 'n' matches are found.
+
+        Example::
+
+            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
+            cap_word = Word(alphas.upper(), alphas.lower())
+
+            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+
+            # the sum() builtin can be used to merge results into a single ParseResults object
+            print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
+
+        prints::
+
+            [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
+            ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
+        """
+        try:
+            return ParseResults([t for t, s, e in self.scanString(instring, maxMatches)])
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
+                if getattr(exc, '__traceback__', None) is not None:
+                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                raise exc
+
+    def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
+        """
+        Generator method to split a string using the given expression as a separator.
+        May be called with optional ``maxsplit`` argument, to limit the number of splits;
+        and the optional ``includeSeparators`` argument (default= ``False``), if the separating
+        matching text should be included in the split results.
+
+        Example::
+
+            punc = oneOf(list(".,;:/-!?"))
+            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
+
+        prints::
+
+            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
+        """
+        splits = 0
+        last = 0
+        for t, s, e in self.scanString(instring, maxMatches=maxsplit):
+            yield instring[last:s]
+            if includeSeparators:
+                yield t[0]
+            last = e
+        yield instring[last:]
+
+    def __add__(self, other):
+        """
+        Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement
+        converts them to :class:`Literal`s by default.
+
+        Example::
+
+            greet = Word(alphas) + "," + Word(alphas) + "!"
+            hello = "Hello, World!"
+            print (hello, "->", greet.parseString(hello))
+
+        prints::
+
+            Hello, World! -> ['Hello', ',', 'World', '!']
+
+        ``...`` may be used as a parse expression as a short form of :class:`SkipTo`.
+
+            Literal('start') + ... + Literal('end')
+
+        is equivalent to:
+
+            Literal('start') + SkipTo('end')("_skipped*") + Literal('end')
+
+        Note that the skipped text is returned with '_skipped' as a results name,
+        and to support having multiple skips in the same parser, the value returned is
+        a list of all skipped text.
+        """
+        if other is Ellipsis:
+            return _PendingSkip(self)
+
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return And([self, other])
+
+    def __radd__(self, other):
+        """
+        Implementation of + operator when left operand is not a :class:`ParserElement`
+        """
+        if other is Ellipsis:
+            return SkipTo(self)("_skipped*") + self
+
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return other + self
+
+    def __sub__(self, other):
+        """
+        Implementation of - operator, returns :class:`And` with error stop
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return self + And._ErrorStop() + other
+
+    def __rsub__(self, other):
+        """
+        Implementation of - operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return other - self
+
+    def __mul__(self, other):
+        """
+        Implementation of * operator, allows use of ``expr * 3`` in place of
+        ``expr + expr + expr``.  Expressions may also be multiplied by a 2-integer
+        tuple, similar to ``{min, max}`` multipliers in regular expressions.  Tuples
+        may also include ``None`` as in:
+         - ``expr*(n, None)`` or ``expr*(n, )`` is equivalent
+              to ``expr*n + ZeroOrMore(expr)``
+              (read as "at least n instances of ``expr``")
+         - ``expr*(None, n)`` is equivalent to ``expr*(0, n)``
+              (read as "0 to n instances of ``expr``")
+         - ``expr*(None, None)`` is equivalent to ``ZeroOrMore(expr)``
+         - ``expr*(1, None)`` is equivalent to ``OneOrMore(expr)``
+
+        Note that ``expr*(None, n)`` does not raise an exception if
+        more than n exprs exist in the input stream; that is,
+        ``expr*(None, n)`` does not enforce a maximum number of expr
+        occurrences.  If this behavior is desired, then write
+        ``expr*(None, n) + ~expr``
+        """
+        if other is Ellipsis:
+            other = (0, None)
+        elif isinstance(other, tuple) and other[:1] == (Ellipsis,):
+            other = ((0, ) + other[1:] + (None,))[:2]
+
+        if isinstance(other, int):
+            minElements, optElements = other, 0
+        elif isinstance(other, tuple):
+            other = tuple(o if o is not Ellipsis else None for o in other)
+            other = (other + (None, None))[:2]
+            if other[0] is None:
+                other = (0, other[1])
+            if isinstance(other[0], int) and other[1] is None:
+                if other[0] == 0:
+                    return ZeroOrMore(self)
+                if other[0] == 1:
+                    return OneOrMore(self)
+                else:
+                    return self * other[0] + ZeroOrMore(self)
+            elif isinstance(other[0], int) and isinstance(other[1], int):
+                minElements, optElements = other
+                optElements -= minElements
+            else:
+                raise TypeError("cannot multiply 'ParserElement' and ('%s', '%s') objects", type(other[0]), type(other[1]))
+        else:
+            raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))
+
+        if minElements < 0:
+            raise ValueError("cannot multiply ParserElement by negative value")
+        if optElements < 0:
+            raise ValueError("second tuple value must be greater or equal to first tuple value")
+        if minElements == optElements == 0:
+            raise ValueError("cannot multiply ParserElement by 0 or (0, 0)")
+
+        if optElements:
+            def makeOptionalList(n):
+                if n > 1:
+                    return Optional(self + makeOptionalList(n - 1))
+                else:
+                    return Optional(self)
+            if minElements:
+                if minElements == 1:
+                    ret = self + makeOptionalList(optElements)
+                else:
+                    ret = And([self] * minElements) + makeOptionalList(optElements)
+            else:
+                ret = makeOptionalList(optElements)
+        else:
+            if minElements == 1:
+                ret = self
+            else:
+                ret = And([self] * minElements)
+        return ret
+
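+    # A few concrete uses of the * overload (a sketch, not upstream code):
+    #
+    #   ab = Word(alphas)
+    #   (ab * 3).parseString("x y z")   # -> ['x', 'y', 'z']
+    #   ab * (2, 4)                     # 2 to 4 occurrences
+    #   ab * (1, None)                  # same as OneOrMore(ab)
+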
+    def __rmul__(self, other):
+        return self.__mul__(other)
+
+    def __or__(self, other):
+        """
+        Implementation of | operator - returns :class:`MatchFirst`
+        """
+        if other is Ellipsis:
+            return _PendingSkip(self, must_skip=True)
+
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return MatchFirst([self, other])
+
+    def __ror__(self, other):
+        """
+        Implementation of | operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return other | self
+
+    def __xor__(self, other):
+        """
+        Implementation of ^ operator - returns :class:`Or`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return Or([self, other])
+
+    def __rxor__(self, other):
+        """
+        Implementation of ^ operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return other ^ self
+
+    def __and__(self, other):
+        """
+        Implementation of & operator - returns :class:`Each`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return Each([self, other])
+
+    def __rand__(self, other):
+        """
+        Implementation of & operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return other & self
+
+    def __invert__(self):
+        """
+        Implementation of ~ operator - returns :class:`NotAny`
+        """
+        return NotAny(self)
+
+    def __iter__(self):
+        # must implement __iter__ to override legacy use of sequential access to __getitem__ to
+        # iterate over a sequence
+        raise TypeError('%r object is not iterable' % self.__class__.__name__)
+
+    def __getitem__(self, key):
+        """
+        use ``[]`` indexing notation as a short form for expression repetition:
+         - ``expr[n]`` is equivalent to ``expr*n``
+         - ``expr[m, n]`` is equivalent to ``expr*(m, n)``
+         - ``expr[n, ...]`` or ``expr[n,]`` is equivalent
+              to ``expr*n + ZeroOrMore(expr)``
+              (read as "at least n instances of ``expr``")
+         - ``expr[..., n]`` is equivalent to ``expr*(0, n)``
+              (read as "0 to n instances of ``expr``")
+         - ``expr[...]`` and ``expr[0, ...]`` are equivalent to ``ZeroOrMore(expr)``
+         - ``expr[1, ...]`` is equivalent to ``OneOrMore(expr)``
+         ``None`` may be used in place of ``...``.
+
+        Note that ``expr[..., n]`` and ``expr[m, n]`` do not raise an exception
+        if more than ``n`` ``expr``s exist in the input stream.  If this behavior is
+        desired, then write ``expr[..., n] + ~expr``.
+        """
+
+        # convert single arg keys to tuples
+        try:
+            if isinstance(key, str):
+                key = (key,)
+            iter(key)
+        except TypeError:
+            key = (key, key)
+
+        if len(key) > 2:
+            warnings.warn("only 1 or 2 index arguments supported ({0}{1})".format(key[:5],
+                                                                                '... [{0}]'.format(len(key))
+                                                                                if len(key) > 5 else ''))
+
+        # clip to 2 elements
+        ret = self * tuple(key[:2])
+        return ret
+
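+    # The same repetitions written with [] indexing (a sketch, not upstream
+    # code):
+    #
+    #   ab = Word(alphas)
+    #   ab[3]        # same as ab * 3
+    #   ab[1, ...]   # same as OneOrMore(ab)
+    #   ab[..., 3]   # 0 to 3 occurrences, same as ab * (0, 3)
+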
+    def __call__(self, name=None):
+        """
+        Shortcut for :class:`setResultsName`, with ``listAllMatches=False``.
+
+        If ``name`` is given with a trailing ``'*'`` character, then ``listAllMatches`` will be
+        passed as ``True``.
+
+        If ``name`` is omitted, same as calling :class:`copy`.
+
+        Example::
+
+            # these are equivalent
+            userdata = Word(alphas).setResultsName("name") + Word(nums + "-").setResultsName("socsecno")
+            userdata = Word(alphas)("name") + Word(nums + "-")("socsecno")
+        """
+        if name is not None:
+            return self._setResultsName(name)
+        else:
+            return self.copy()
+
+    def suppress(self):
+        """
+        Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from
+        cluttering up returned output.
+        """
+        return Suppress(self)
+
+    def leaveWhitespace(self):
+        """
+        Disables the skipping of whitespace before matching the characters in the
+        :class:`ParserElement`'s defined pattern.  This is normally only used internally by
+        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
+        """
+        self.skipWhitespace = False
+        return self
+
+    def setWhitespaceChars(self, chars):
+        """
+        Overrides the default whitespace chars
+        """
+        self.skipWhitespace = True
+        self.whiteChars = chars
+        self.copyDefaultWhiteChars = False
+        return self
+
+    def parseWithTabs(self):
+        """
+        Overrides default behavior to expand ``<TAB>``s to spaces before parsing the input string.
+        Must be called before ``parseString`` when the input grammar contains elements that
+        match ``<TAB>`` characters.
+        """
+        self.keepTabs = True
+        return self
+
+    def ignore(self, other):
+        """
+        Define expression to be ignored (e.g., comments) while doing pattern
+        matching; may be called repeatedly, to define multiple comment or other
+        ignorable patterns.
+
+        Example::
+
+            patt = OneOrMore(Word(alphas))
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
+
+            patt.ignore(cStyleComment)
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
+        """
+        if isinstance(other, basestring):
+            other = Suppress(other)
+
+        if isinstance(other, Suppress):
+            if other not in self.ignoreExprs:
+                self.ignoreExprs.append(other)
+        else:
+            self.ignoreExprs.append(Suppress(other.copy()))
+        return self
+
+    def setDebugActions(self, startAction, successAction, exceptionAction):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        """
+        self.debugActions = (startAction or _defaultStartDebugAction,
+                             successAction or _defaultSuccessDebugAction,
+                             exceptionAction or _defaultExceptionDebugAction)
+        self.debug = True
+        return self
+
+    def setDebug(self, flag=True):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        Set ``flag`` to True to enable, False to disable.
+
+        Example::
+
+            wd = Word(alphas).setName("alphaword")
+            integer = Word(nums).setName("numword")
+            term = wd | integer
+
+            # turn on debugging for wd
+            wd.setDebug()
+
+            OneOrMore(term).parseString("abc 123 xyz 890")
+
+        prints::
+
+            Match alphaword at loc 0(1,1)
+            Matched alphaword -> ['abc']
+            Match alphaword at loc 3(1,4)
+            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
+            Match alphaword at loc 7(1,8)
+            Matched alphaword -> ['xyz']
+            Match alphaword at loc 11(1,12)
+            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
+            Match alphaword at loc 15(1,16)
+            Exception raised:Expected alphaword (at char 15), (line:1, col:16)
+
+        The output shown is that produced by the default debug actions - custom debug actions can be
+        specified using :class:`setDebugActions`. Prior to attempting
+        to match the ``wd`` expression, the debugging message ``"Match <exprname> at loc <n>(<line>,<col>)"``
+        is shown. Then if the parse succeeds, a ``"Matched"`` message is shown, or an ``"Exception raised"``
+        message is shown. Also note the use of :class:`setName` to assign a human-readable name to the expression,
+        which makes debugging and exception messages easier to understand - for instance, the default
+        name created for the :class:`Word` expression without calling ``setName`` is ``"W:(ABCD...)"``.
+        """
+        if flag:
+            self.setDebugActions(_defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction)
+        else:
+            self.debug = False
+        return self
+
+    def __str__(self):
+        return self.name
+
+    def __repr__(self):
+        return _ustr(self)
+
+    def streamline(self):
+        self.streamlined = True
+        self.strRepr = None
+        return self
+
+    def checkRecursion(self, parseElementList):
+        pass
+
+    def validate(self, validateTrace=None):
+        """
+        Check defined expressions for valid structure, check for infinite recursive definitions.
+        """
+        self.checkRecursion([])
+
+    def parseFile(self, file_or_filename, parseAll=False):
+        """
+        Execute the parse expression on the given file or filename.
+        If a filename is specified (instead of a file object),
+        the entire file is opened, read, and closed before parsing.
+        """
+        try:
+            file_contents = file_or_filename.read()
+        except AttributeError:
+            with open(file_or_filename, "r") as f:
+                file_contents = f.read()
+        try:
+            return self.parseString(file_contents, parseAll)
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
+                if getattr(exc, '__traceback__', None) is not None:
+                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                raise exc
+
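+    # Illustrative use of parseFile (a sketch; 'config.txt' is a hypothetical
+    # file name, not part of this repo):
+    #
+    #   results = expr.parseFile("config.txt", parseAll=True)
+    #   # equivalent to parseString on the file's full contents
+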
+    def __eq__(self, other):
+        if self is other:
+            return True
+        elif isinstance(other, basestring):
+            return self.matches(other)
+        elif isinstance(other, ParserElement):
+            return vars(self) == vars(other)
+        return False
+
+    def __ne__(self, other):
+        return not (self == other)
+
+    def __hash__(self):
+        return id(self)
+
+    def __req__(self, other):
+        return self == other
+
+    def __rne__(self, other):
+        return not (self == other)
+
+    def matches(self, testString, parseAll=True):
+        """
+        Method for quick testing of a parser against a test string. Good for simple
+        inline microtests of sub expressions while building up larger parser.
+
+        Parameters:
+         - testString - to test against this expression for a match
+         - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests
+
+        Example::
+
+            expr = Word(nums)
+            assert expr.matches("100")
+        """
+        try:
+            self.parseString(_ustr(testString), parseAll=parseAll)
+            return True
+        except ParseBaseException:
+            return False
+
+    def runTests(self, tests, parseAll=True, comment='#',
+                 fullDump=True, printResults=True, failureTests=False, postParse=None,
+                 file=None):
+        """
+        Execute the parse expression on a series of test strings, showing each
+        test, the parsed results or where the parse failed. Quick and easy way to
+        run a parse expression against a list of sample strings.
+
+        Parameters:
+         - tests - a list of separate test strings, or a multiline string of test strings
+         - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests
+         - comment - (default= ``'#'``) - expression for indicating embedded comments in the test
+              string; pass None to disable comment filtering
+         - fullDump - (default= ``True``) - dump results as list followed by results names in nested outline;
+              if False, only dump nested list
+         - printResults - (default= ``True``) prints test output to stdout
+         - failureTests - (default= ``False``) indicates if these tests are expected to fail parsing
+         - postParse - (default= ``None``) optional callback for successful parse results; called as
+              `fn(test_string, parse_results)` and returns a string to be added to the test output
+         - file - (default= ``None``) optional file-like object to which test output will be written;
+              if None, will default to ``sys.stdout``
+
+        Returns: a (success, results) tuple, where success indicates that all tests succeeded
+        (or failed if ``failureTests`` is True), and the results contain a list of lines of each
+        test's output
+
+        Example::
+
+            number_expr = pyparsing_common.number.copy()
+
+            result = number_expr.runTests('''
+                # unsigned integer
+                100
+                # negative integer
+                -100
+                # float with scientific notation
+                6.02e23
+                # integer with scientific notation
+                1e-12
+                ''')
+            print("Success" if result[0] else "Failed!")
+
+            result = number_expr.runTests('''
+                # stray character
+                100Z
+                # missing leading digit before '.'
+                -.100
+                # too many '.'
+                3.14.159
+                ''', failureTests=True)
+            print("Success" if result[0] else "Failed!")
+
+        prints::
+
+            # unsigned integer
+            100
+            [100]
+
+            # negative integer
+            -100
+            [-100]
+
+            # float with scientific notation
+            6.02e23
+            [6.02e+23]
+
+            # integer with scientific notation
+            1e-12
+            [1e-12]
+
+            Success
+
+            # stray character
+            100Z
+               ^
+            FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+            # missing leading digit before '.'
+            -.100
+            ^
+            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+            # too many '.'
+            3.14.159
+                ^
+            FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+            Success
+
+        Each test string must be on a single line. If you want to test a string that spans multiple
+        lines, create a test like this::
+
+            expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
+
+        (Note that this is a raw string literal; you must include the leading 'r'.)
+        """
+        if isinstance(tests, basestring):
+            tests = list(map(str.strip, tests.rstrip().splitlines()))
+        if isinstance(comment, basestring):
+            comment = Literal(comment)
+        if file is None:
+            file = sys.stdout
+        print_ = file.write
+
+        allResults = []
+        comments = []
+        success = True
+        NL = Literal(r'\n').addParseAction(replaceWith('\n')).ignore(quotedString)
+        BOM = u'\ufeff'
+        for t in tests:
+            if comment is not None and comment.matches(t, False) or comments and not t:
+                comments.append(t)
+                continue
+            if not t:
+                continue
+            out = ['\n' + '\n'.join(comments) if comments else '', t]
+            comments = []
+            try:
+                # convert newline marks to actual newlines, and strip leading BOM if present
+                t = NL.transformString(t.lstrip(BOM))
+                result = self.parseString(t, parseAll=parseAll)
+            except ParseBaseException as pe:
+                fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
+                if '\n' in t:
+                    out.append(line(pe.loc, t))
+                    out.append(' ' * (col(pe.loc, t) - 1) + '^' + fatal)
+                else:
+                    out.append(' ' * pe.loc + '^' + fatal)
+                out.append("FAIL: " + str(pe))
+                success = success and failureTests
+                result = pe
+            except Exception as exc:
+                out.append("FAIL-EXCEPTION: " + str(exc))
+                success = success and failureTests
+                result = exc
+            else:
+                success = success and not failureTests
+                if postParse is not None:
+                    try:
+                        pp_value = postParse(t, result)
+                        if pp_value is not None:
+                            if isinstance(pp_value, ParseResults):
+                                out.append(pp_value.dump())
+                            else:
+                                out.append(str(pp_value))
+                        else:
+                            out.append(result.dump())
+                    except Exception as e:
+                        out.append(result.dump(full=fullDump))
+                        out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e))
+                else:
+                    out.append(result.dump(full=fullDump))
+
+            if printResults:
+                if fullDump:
+                    out.append('')
+                print_('\n'.join(out))
+
+            allResults.append((t, result))
+
+        return success, allResults
+
+
+class _PendingSkip(ParserElement):
+    # internal placeholder class to hold a place where '...' is added to a parser element,
+    # once another ParserElement is added, this placeholder will be replaced with a SkipTo
+    def __init__(self, expr, must_skip=False):
+        super(_PendingSkip, self).__init__()
+        self.strRepr = str(expr + Empty()).replace('Empty', '...')
+        self.name = self.strRepr
+        self.anchor = expr
+        self.must_skip = must_skip
+
+    def __add__(self, other):
+        skipper = SkipTo(other).setName("...")("_skipped*")
+        if self.must_skip:
+            def must_skip(t):
+                if not t._skipped or t._skipped.asList() == ['']:
+                    del t[0]
+                    t.pop("_skipped", None)
+            def show_skip(t):
+                if t._skipped.asList()[-1:] == ['']:
+                    skipped = t.pop('_skipped')
+                    t['_skipped'] = 'missing <' + repr(self.anchor) + '>'
+            return (self.anchor + skipper().addParseAction(must_skip)
+                    | skipper().addParseAction(show_skip)) + other
+
+        return self.anchor + skipper + other
+
+    def __repr__(self):
+        return self.strRepr
+
+    def parseImpl(self, *args):
+        raise Exception("use of `...` expression without following SkipTo target expression")
+
+
+class Token(ParserElement):
+    """Abstract :class:`ParserElement` subclass, for defining atomic
+    matching patterns.
+    """
+    def __init__(self):
+        super(Token, self).__init__(savelist=False)
+
+
+class Empty(Token):
+    """An empty token, will always match.
+    """
+    def __init__(self):
+        super(Empty, self).__init__()
+        self.name = "Empty"
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+
+
+class NoMatch(Token):
+    """A token that will never match.
+    """
+    def __init__(self):
+        super(NoMatch, self).__init__()
+        self.name = "NoMatch"
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.errmsg = "Unmatchable token"
+
+    def parseImpl(self, instring, loc, doActions=True):
+        raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Literal(Token):
+    """Token to exactly match a specified string.
+
+    Example::
+
+        Literal('blah').parseString('blah')  # -> ['blah']
+        Literal('blah').parseString('blahfooblah')  # -> ['blah']
+        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
+
+    For case-insensitive matching, use :class:`CaselessLiteral`.
+
+    For keyword matching (force word break before and after the matched string),
+    use :class:`Keyword` or :class:`CaselessKeyword`.
+    """
+    def __init__(self, matchString):
+        super(Literal, self).__init__()
+        self.match = matchString
+        self.matchLen = len(matchString)
+        try:
+            self.firstMatchChar = matchString[0]
+        except IndexError:
+            warnings.warn("null string passed to Literal; use Empty() instead",
+                            SyntaxWarning, stacklevel=2)
+            self.__class__ = Empty
+        self.name = '"%s"' % _ustr(self.match)
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = False
+        self.mayIndexError = False
+
+        # Performance tuning: modify __class__ to select
+        # a parseImpl optimized for single-character check
+        if self.matchLen == 1 and type(self) is Literal:
+            self.__class__ = _SingleCharLiteral
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if instring[loc] == self.firstMatchChar and instring.startswith(self.match, loc):
+            return loc + self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class _SingleCharLiteral(Literal):
+    def parseImpl(self, instring, loc, doActions=True):
+        if instring[loc] == self.firstMatchChar:
+            return loc + 1, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+
+_L = Literal
+ParserElement._literalStringClass = Literal
+
+class Keyword(Token):
+    """Token to exactly match a specified string as a keyword, that is,
+    it must be immediately followed by a non-keyword character.  Compare
+    with :class:`Literal`:
+
+     - ``Literal("if")`` will match the leading ``'if'`` in
+       ``'ifAndOnlyIf'``.
+     - ``Keyword("if")`` will not; it will only match the leading
+       ``'if'`` in ``'if x=1'``, or ``'if(y==2)'``
+
+    Accepts two optional constructor arguments in addition to the
+    keyword string:
+
+     - ``identChars`` is a string of characters that would be valid
+       identifier characters, defaulting to all alphanumerics + "_" and
+       "$"
+     - ``caseless`` allows case-insensitive matching, default is ``False``.
+
+    Example::
+
+        Keyword("start").parseString("start")  # -> ['start']
+        Keyword("start").parseString("starting")  # -> Exception
+
+    For case-insensitive matching, use :class:`CaselessKeyword`.
+    """
+    DEFAULT_KEYWORD_CHARS = alphanums + "_$"
+
+    def __init__(self, matchString, identChars=None, caseless=False):
+        super(Keyword, self).__init__()
+        if identChars is None:
+            identChars = Keyword.DEFAULT_KEYWORD_CHARS
+        self.match = matchString
+        self.matchLen = len(matchString)
+        try:
+            self.firstMatchChar = matchString[0]
+        except IndexError:
+            warnings.warn("null string passed to Keyword; use Empty() instead",
+                          SyntaxWarning, stacklevel=2)
+        self.name = '"%s"' % self.match
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = False
+        self.mayIndexError = False
+        self.caseless = caseless
+        if caseless:
+            self.caselessmatch = matchString.upper()
+            identChars = identChars.upper()
+        self.identChars = set(identChars)
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if self.caseless:
+            if ((instring[loc:loc + self.matchLen].upper() == self.caselessmatch)
+                    and (loc >= len(instring) - self.matchLen
+                         or instring[loc + self.matchLen].upper() not in self.identChars)
+                    and (loc == 0
+                         or instring[loc - 1].upper() not in self.identChars)):
+                return loc + self.matchLen, self.match
+
+        else:
+            if instring[loc] == self.firstMatchChar:
+                if ((self.matchLen == 1 or instring.startswith(self.match, loc))
+                        and (loc >= len(instring) - self.matchLen
+                             or instring[loc + self.matchLen] not in self.identChars)
+                        and (loc == 0 or instring[loc - 1] not in self.identChars)):
+                    return loc + self.matchLen, self.match
+
+        raise ParseException(instring, loc, self.errmsg, self)
+
+    def copy(self):
+        c = super(Keyword, self).copy()
+        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
+        return c
+
+    @staticmethod
+    def setDefaultKeywordChars(chars):
+        """Overrides the default Keyword chars
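+
+        Example (a minimal sketch)::
+
+            # with '-' added to the keyword chars, Keyword("if") will no
+            # longer match the leading "if" in "if-else"
+            Keyword.setDefaultKeywordChars(alphanums + "_$-")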
+        """
+        Keyword.DEFAULT_KEYWORD_CHARS = chars
+
+class CaselessLiteral(Literal):
+    """Token to match a specified string, ignoring case of letters.
+    Note: the matched results will always be in the case of the given
+    match string, NOT the case of the input text.
+
+    Example::
+
+        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
+
+    (Contrast with example for :class:`CaselessKeyword`.)
+    """
+    def __init__(self, matchString):
+        super(CaselessLiteral, self).__init__(matchString.upper())
+        # Preserve the defining literal.
+        self.returnString = matchString
+        self.name = "'%s'" % self.returnString
+        self.errmsg = "Expected " + self.name
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if instring[loc:loc + self.matchLen].upper() == self.match:
+            return loc + self.matchLen, self.returnString
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class CaselessKeyword(Keyword):
+    """
+    Caseless version of :class:`Keyword`.
+
+    Example::
+
+        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
+
+    (Contrast with example for :class:`CaselessLiteral`.)
+    """
+    def __init__(self, matchString, identChars=None):
+        super(CaselessKeyword, self).__init__(matchString, identChars, caseless=True)
+
+class CloseMatch(Token):
+    """A variation on :class:`Literal` which matches "close" matches,
+    that is, strings with at most 'n' mismatching characters.
+    :class:`CloseMatch` takes parameters:
+
+     - ``match_string`` - string to be matched
+     - ``maxMismatches`` - (``default=1``) maximum number of
+       mismatches allowed to count as a match
+
+    The results from a successful parse will contain the matched text
+    from the input string and the following named results:
+
+     - ``mismatches`` - a list of the positions within the
+       match_string where mismatches were found
+     - ``original`` - the original match_string used to compare
+       against the input string
+
+    If ``mismatches`` is an empty list, then the match was an exact
+    match.
+
+    Example::
+
+        patt = CloseMatch("ATCATCGAATGGA")
+        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
+        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)
+
+        # exact match
+        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})
+
+        # close match allowing up to 2 mismatches
+        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
+        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
+    """
+    def __init__(self, match_string, maxMismatches=1):
+        super(CloseMatch, self).__init__()
+        self.name = match_string
+        self.match_string = match_string
+        self.maxMismatches = maxMismatches
+        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
+        self.mayIndexError = False
+        self.mayReturnEmpty = False
+
+    def parseImpl(self, instring, loc, doActions=True):
+        start = loc
+        instrlen = len(instring)
+        maxloc = start + len(self.match_string)
+
+        if maxloc <= instrlen:
+            match_string = self.match_string
+            match_stringloc = 0
+            mismatches = []
+            maxMismatches = self.maxMismatches
+
+            for match_stringloc, s_m in enumerate(zip(instring[loc:maxloc], match_string)):
+                src, mat = s_m
+                if src != mat:
+                    mismatches.append(match_stringloc)
+                    if len(mismatches) > maxMismatches:
+                        break
+            else:
+                loc = match_stringloc + 1
+                results = ParseResults([instring[start:loc]])
+                results['original'] = match_string
+                results['mismatches'] = mismatches
+                return loc, results
+
+        raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Word(Token):
+    """Token for matching words composed of allowed character sets.
+    Defined with string containing all allowed initial characters, an
+    optional string containing allowed body characters (if omitted,
+    defaults to the initial character set), and an optional minimum,
+    maximum, and/or exact length.  The default value for ``min`` is
+    1 (a minimum value < 1 is not valid); the default values for
+    ``max`` and ``exact`` are 0, meaning no maximum or exact
+    length restriction. An optional ``excludeChars`` parameter can
+    list characters that might be found in the input ``bodyChars``
+    string; useful to define a word of all printables except for one or
+    two characters, for instance.
+
+    :class:`srange` is useful for defining custom character set strings
+    for defining ``Word`` expressions, using range notation from
+    regular expression character sets.
+
+    A common mistake is to use :class:`Word` to match a specific literal
+    string, as in ``Word("Address")``. Remember that :class:`Word`
+    uses the string argument to define *sets* of matchable characters.
+    This expression would match "Add", "AAA", "dAred", or any other word
+    made up of the characters 'A', 'd', 'r', 'e', and 's'. To match an
+    exact literal string, use :class:`Literal` or :class:`Keyword`.
+
+    pyparsing includes helper strings for building Words:
+
+     - :class:`alphas`
+     - :class:`nums`
+     - :class:`alphanums`
+     - :class:`hexnums`
+     - :class:`alphas8bit` (alphabetic characters in ASCII range 128-255
+       - accented, tilded, umlauted, etc.)
+     - :class:`punc8bit` (non-alphabetic characters in ASCII range
+       128-255 - currency, symbols, superscripts, diacriticals, etc.)
+     - :class:`printables` (any non-whitespace character)
+
+    Example::
+
+        # a word composed of digits
+        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
+
+        # a word with a leading capital, and zero or more lowercase
+        capital_word = Word(alphas.upper(), alphas.lower())
+
+        # hostnames are alphanumeric, with leading alpha, and '-'
+        hostname = Word(alphas, alphanums + '-')
+
+        # roman numeral (not a strict parser, accepts invalid mix of characters)
+        roman = Word("IVXLCDM")
+
+        # any string of non-whitespace characters, except for ','
+        csv_value = Word(printables, excludeChars=",")
+    """
+    def __init__(self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None):
+        super(Word, self).__init__()
+        if excludeChars:
+            excludeChars = set(excludeChars)
+            initChars = ''.join(c for c in initChars if c not in excludeChars)
+            if bodyChars:
+                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
+        self.initCharsOrig = initChars
+        self.initChars = set(initChars)
+        if bodyChars:
+            self.bodyCharsOrig = bodyChars
+            self.bodyChars = set(bodyChars)
+        else:
+            self.bodyCharsOrig = initChars
+            self.bodyChars = set(initChars)
+
+        self.maxSpecified = max > 0
+
+        if min < 1:
+            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.asKeyword = asKeyword
+
+        if ' ' not in self.initCharsOrig + self.bodyCharsOrig and (min == 1 and max == 0 and exact == 0):
+            if self.bodyCharsOrig == self.initCharsOrig:
+                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
+            elif len(self.initCharsOrig) == 1:
+                self.reString = "%s[%s]*" % (re.escape(self.initCharsOrig),
+                                             _escapeRegexRangeChars(self.bodyCharsOrig),)
+            else:
+                self.reString = "[%s][%s]*" % (_escapeRegexRangeChars(self.initCharsOrig),
+                                               _escapeRegexRangeChars(self.bodyCharsOrig),)
+            if self.asKeyword:
+                self.reString = r"\b" + self.reString + r"\b"
+
+            try:
+                self.re = re.compile(self.reString)
+            except Exception:
+                self.re = None
+            else:
+                self.re_match = self.re.match
+                self.__class__ = _WordRegex
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if instring[loc] not in self.initChars:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        start = loc
+        loc += 1
+        instrlen = len(instring)
+        bodychars = self.bodyChars
+        maxloc = start + self.maxLen
+        maxloc = min(maxloc, instrlen)
+        while loc < maxloc and instring[loc] in bodychars:
+            loc += 1
+
+        throwException = False
+        if loc - start < self.minLen:
+            throwException = True
+        elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
+            throwException = True
+        elif self.asKeyword:
+            if (start > 0 and instring[start - 1] in bodychars
+                    or loc < instrlen and instring[loc] in bodychars):
+                throwException = True
+
+        if throwException:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+    def __str__(self):
+        try:
+            return super(Word, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+
+            def charsAsStr(s):
+                if len(s) > 4:
+                    return s[:4] + "..."
+                else:
+                    return s
+
+            if self.initCharsOrig != self.bodyCharsOrig:
+                self.strRepr = "W:(%s, %s)" % (charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig))
+            else:
+                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
+
+        return self.strRepr
+
+class _WordRegex(Word):
+    def parseImpl(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        return loc, result.group()
+
+
+class Char(_WordRegex):
+    """A short-cut class for defining ``Word(characters, exact=1)``,
+    for matching any single character from a given string of
+    characters.
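+
+    Example (a brief sketch)::
+
+        vowel = Char("aeiou")
+        vowel.parseString("a")  # -> ['a']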
+    """
+    def __init__(self, charset, asKeyword=False, excludeChars=None):
+        super(Char, self).__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars)
+        self.reString = "[%s]" % _escapeRegexRangeChars(''.join(self.initChars))
+        if asKeyword:
+            self.reString = r"\b%s\b" % self.reString
+        self.re = re.compile(self.reString)
+        self.re_match = self.re.match
+
+
+class Regex(Token):
+    r"""Token for matching strings that match a given regular
+    expression. Defined with string specifying the regular expression in
+    a form recognized by the stdlib Python `re module <https://docs.python.org/3/library/re.html>`_.
+    If the given regex contains named groups (defined using ``(?P<name>...)``),
+    these will be preserved as named parse results.
+
+    If you wish to use a different RE module (such as the `regex` module)
+    instead of the Python stdlib re module, you can do so by building your
+    Regex object with an RE that was compiled using that module:
+
+    Example::
+
+        realnum = Regex(r"[+-]?\d+\.\d*")
+        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
+        # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
+        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
+
+        # use regex module instead of stdlib re module to construct a Regex using
+        # a compiled regular expression
+        import regex
+        parser = pp.Regex(regex.compile(r'[0-9]'))
+
+    """
+    def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False):
+        """The parameters ``pattern`` and ``flags`` are passed
+        to the ``re.compile()`` function as-is. See the Python
+        `re module <https://docs.python.org/3/library/re.html>`_ for an
+        explanation of the acceptable patterns and flags.
+        """
+        super(Regex, self).__init__()
+
+        if isinstance(pattern, basestring):
+            if not pattern:
+                warnings.warn("null string passed to Regex; use Empty() instead",
+                              SyntaxWarning, stacklevel=2)
+
+            self.pattern = pattern
+            self.flags = flags
+
+            try:
+                self.re = re.compile(self.pattern, self.flags)
+                self.reString = self.pattern
+            except sre_constants.error:
+                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
+                              SyntaxWarning, stacklevel=2)
+                raise
+
+        elif hasattr(pattern, 'pattern') and hasattr(pattern, 'match'):
+            self.re = pattern
+            self.pattern = self.reString = pattern.pattern
+            self.flags = flags
+
+        else:
+            raise TypeError("Regex may only be constructed with a string or a compiled RE object")
+
+        self.re_match = self.re.match
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = self.re_match("") is not None
+        self.asGroupList = asGroupList
+        self.asMatch = asMatch
+        if self.asGroupList:
+            self.parseImpl = self.parseImplAsGroupList
+        if self.asMatch:
+            self.parseImpl = self.parseImplAsMatch
+
+    def parseImpl(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = ParseResults(result.group())
+        d = result.groupdict()
+        if d:
+            for k, v in d.items():
+                ret[k] = v
+        return loc, ret
+
+    def parseImplAsGroupList(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = result.groups()
+        return loc, ret
+
+    def parseImplAsMatch(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = result
+        return loc, ret
+
+    def __str__(self):
+        try:
+            return super(Regex, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "Re:(%s)" % repr(self.pattern)
+
+        return self.strRepr
+
+    def sub(self, repl):
+        r"""
+        Return Regex with an attached parse action to transform the parsed
+        result as if called using `re.sub(expr, repl, string) <https://docs.python.org/3/library/re.html#re.sub>`_.
+
+        Example::
+
+            make_html = Regex(r"(\w+):(.*?):").sub(r"<\1>\2</\1>")
+            print(make_html.transformString("h1:main title:"))
+            # prints "<h1>main title</h1>"
+        """
+        if self.asGroupList:
+            warnings.warn("cannot use sub() with Regex(asGroupList=True)",
+                          SyntaxWarning, stacklevel=2)
+            raise SyntaxError()
+
+        if self.asMatch and callable(repl):
+            warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)",
+                          SyntaxWarning, stacklevel=2)
+            raise SyntaxError()
+
+        if self.asMatch:
+            def pa(tokens):
+                return tokens[0].expand(repl)
+        else:
+            def pa(tokens):
+                return self.re.sub(repl, tokens[0])
+        return self.addParseAction(pa)
+
+class QuotedString(Token):
+    r"""
+    Token for matching strings that are delimited by quoting characters.
+
+    Defined with the following parameters:
+
+        - quoteChar - string of one or more characters defining the
+          quote delimiting string
+        - escChar - character to escape quotes, typically backslash
+          (default= ``None``)
+        - escQuote - special quote sequence to escape an embedded quote
+          string (such as SQL's ``""`` to escape an embedded ``"``)
+          (default= ``None``)
+        - multiline - boolean indicating whether quotes can span
+          multiple lines (default= ``False``)
+        - unquoteResults - boolean indicating whether the matched text
+          should be unquoted (default= ``True``)
+        - endQuoteChar - string of one or more characters defining the
+          end of the quote delimited string (default= ``None`` => same as
+          quoteChar)
+        - convertWhitespaceEscapes - convert escaped whitespace
+          (``'\t'``, ``'\n'``, etc.) to actual whitespace
+          (default= ``True``)
+
+    Example::
+
+        qs = QuotedString('"')
+        print(qs.searchString('lsjdf "This is the quote" sldjf'))
+        complex_qs = QuotedString('{{', endQuoteChar='}}')
+        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
+        sql_qs = QuotedString('"', escQuote='""')
+        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
+
+    prints::
+
+        [['This is the quote']]
+        [['This is the "quote"']]
+        [['This is the quote with "embedded" quotes']]
+    """
+    def __init__(self, quoteChar, escChar=None, escQuote=None, multiline=False,
+                 unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
+        super(QuotedString, self).__init__()
+
+        # remove white space from quote chars - won't work anyway
+        quoteChar = quoteChar.strip()
+        if not quoteChar:
+            warnings.warn("quoteChar cannot be the empty string", SyntaxWarning, stacklevel=2)
+            raise SyntaxError()
+
+        if endQuoteChar is None:
+            endQuoteChar = quoteChar
+        else:
+            endQuoteChar = endQuoteChar.strip()
+            if not endQuoteChar:
+                warnings.warn("endQuoteChar cannot be the empty string", SyntaxWarning, stacklevel=2)
+                raise SyntaxError()
+
+        self.quoteChar = quoteChar
+        self.quoteCharLen = len(quoteChar)
+        self.firstQuoteChar = quoteChar[0]
+        self.endQuoteChar = endQuoteChar
+        self.endQuoteCharLen = len(endQuoteChar)
+        self.escChar = escChar
+        self.escQuote = escQuote
+        self.unquoteResults = unquoteResults
+        self.convertWhitespaceEscapes = convertWhitespaceEscapes
+
+        if multiline:
+            self.flags = re.MULTILINE | re.DOTALL
+            self.pattern = r'%s(?:[^%s%s]' % (re.escape(self.quoteChar),
+                                              _escapeRegexRangeChars(self.endQuoteChar[0]),
+                                              (escChar is not None and _escapeRegexRangeChars(escChar) or ''))
+        else:
+            self.flags = 0
+            self.pattern = r'%s(?:[^%s\n\r%s]' % (re.escape(self.quoteChar),
+                                                  _escapeRegexRangeChars(self.endQuoteChar[0]),
+                                                  (escChar is not None and _escapeRegexRangeChars(escChar) or ''))
+        if len(self.endQuoteChar) > 1:
+            self.pattern += (
+                '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
+                                                   _escapeRegexRangeChars(self.endQuoteChar[i]))
+                                      for i in range(len(self.endQuoteChar) - 1, 0, -1)) + ')')
+
+        if escQuote:
+            self.pattern += (r'|(?:%s)' % re.escape(escQuote))
+        if escChar:
+            self.pattern += (r'|(?:%s.)' % re.escape(escChar))
+            self.escCharReplacePattern = re.escape(self.escChar) + "(.)"
+        self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
+
+        try:
+            self.re = re.compile(self.pattern, self.flags)
+            self.reString = self.pattern
+            self.re_match = self.re.match
+        except sre_constants.error:
+            warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
+                          SyntaxWarning, stacklevel=2)
+            raise
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = True
+
+    def parseImpl(self, instring, loc, doActions=True):
+        result = instring[loc] == self.firstQuoteChar and self.re_match(instring, loc) or None
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = result.group()
+
+        if self.unquoteResults:
+
+            # strip off quotes
+            ret = ret[self.quoteCharLen: -self.endQuoteCharLen]
+
+            if isinstance(ret, basestring):
+                # replace escaped whitespace
+                if '\\' in ret and self.convertWhitespaceEscapes:
+                    ws_map = {
+                        r'\t': '\t',
+                        r'\n': '\n',
+                        r'\f': '\f',
+                        r'\r': '\r',
+                    }
+                    for wslit, wschar in ws_map.items():
+                        ret = ret.replace(wslit, wschar)
+
+                # replace escaped characters
+                if self.escChar:
+                    ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
+
+                # replace escaped quotes
+                if self.escQuote:
+                    ret = ret.replace(self.escQuote, self.endQuoteChar)
+
+        return loc, ret
+
+    def __str__(self):
+        try:
+            return super(QuotedString, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
+
+        return self.strRepr
+
+
+class CharsNotIn(Token):
+    """Token for matching words composed of characters *not* in a given
+    set (will include whitespace in matched characters if not listed in
+    the provided exclusion set - see example). Defined with string
+    containing all disallowed characters, and an optional minimum,
+    maximum, and/or exact length.  The default value for ``min`` is
+    1 (a minimum value < 1 is not valid); the default values for
+    ``max`` and ``exact`` are 0, meaning no maximum or exact
+    length restriction.
+
+    Example::
+
+        # define a comma-separated-value as anything that is not a ','
+        csv_value = CharsNotIn(',')
+        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
+
+    prints::
+
+        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
+    """
+    def __init__(self, notChars, min=1, max=0, exact=0):
+        super(CharsNotIn, self).__init__()
+        self.skipWhitespace = False
+        self.notChars = notChars
+
+        if min < 1:
+            raise ValueError("cannot specify a minimum length < 1; use "
+                             "Optional(CharsNotIn()) if zero-length char group is permitted")
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = (self.minLen == 0)
+        self.mayIndexError = False
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if instring[loc] in self.notChars:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        start = loc
+        loc += 1
+        notchars = self.notChars
+        maxlen = min(start + self.maxLen, len(instring))
+        while loc < maxlen and instring[loc] not in notchars:
+            loc += 1
+
+        if loc - start < self.minLen:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+    def __str__(self):
+        try:
+            return super(CharsNotIn, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            if len(self.notChars) > 4:
+                self.strRepr = "!W:(%s...)" % self.notChars[:4]
+            else:
+                self.strRepr = "!W:(%s)" % self.notChars
+
+        return self.strRepr
+
+class White(Token):
+    """Special matching class for matching whitespace.  Normally,
+    whitespace is ignored by pyparsing grammars.  This class is included
+    when some whitespace structures are significant.  Define with
+    a string containing the whitespace characters to be matched; default
+    is ``" \\t\\r\\n"``.  Also takes optional ``min``,
+    ``max``, and ``exact`` arguments, as defined for the
+    :class:`Word` class.
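+
+    Example (a minimal sketch, making tab characters significant)::
+
+        # match two words separated by a single tab
+        expr = Word(alphas) + White("\\t") + Word(alphas)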
+    """
+    whiteStrs = {
+        ' ' : '<SP>',
+        '\t': '<TAB>',
+        '\n': '<LF>',
+        '\r': '<CR>',
+        '\f': '<FF>',
+        u'\u00A0': '<NBSP>',
+        u'\u1680': '<OGHAM_SPACE_MARK>',
+        u'\u180E': '<MONGOLIAN_VOWEL_SEPARATOR>',
+        u'\u2000': '<EN_QUAD>',
+        u'\u2001': '<EM_QUAD>',
+        u'\u2002': '<EN_SPACE>',
+        u'\u2003': '<EM_SPACE>',
+        u'\u2004': '<THREE-PER-EM_SPACE>',
+        u'\u2005': '<FOUR-PER-EM_SPACE>',
+        u'\u2006': '<SIX-PER-EM_SPACE>',
+        u'\u2007': '<FIGURE_SPACE>',
+        u'\u2008': '<PUNCTUATION_SPACE>',
+        u'\u2009': '<THIN_SPACE>',
+        u'\u200A': '<HAIR_SPACE>',
+        u'\u200B': '<ZERO_WIDTH_SPACE>',
+        u'\u202F': '<NNBSP>',
+        u'\u205F': '<MMSP>',
+        u'\u3000': '<IDEOGRAPHIC_SPACE>',
+        }
+    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
+        super(White, self).__init__()
+        self.matchWhite = ws
+        self.setWhitespaceChars("".join(c for c in self.whiteChars if c not in self.matchWhite))
+        # ~ self.leaveWhitespace()
+        self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
+        self.mayReturnEmpty = True
+        self.errmsg = "Expected " + self.name
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if instring[loc] not in self.matchWhite:
+            raise ParseException(instring, loc, self.errmsg, self)
+        start = loc
+        loc += 1
+        maxloc = start + self.maxLen
+        maxloc = min(maxloc, len(instring))
+        while loc < maxloc and instring[loc] in self.matchWhite:
+            loc += 1
+
+        if loc - start < self.minLen:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+
+class _PositionToken(Token):
+    def __init__(self):
+        super(_PositionToken, self).__init__()
+        self.name = self.__class__.__name__
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+
+class GoToColumn(_PositionToken):
+    """Token to advance to a specific column of input text; useful for
+    tabular report scraping.
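+
+    Example (an illustrative sketch; assumes the numeric field always
+    starts at column 10)::
+
+        patt = Word(alphas) + GoToColumn(10).suppress() + Word(nums)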
+    """
+    def __init__(self, colno):
+        super(GoToColumn, self).__init__()
+        self.col = colno
+
+    def preParse(self, instring, loc):
+        if col(loc, instring) != self.col:
+            instrlen = len(instring)
+            if self.ignoreExprs:
+                loc = self._skipIgnorables(instring, loc)
+            while loc < instrlen and instring[loc].isspace() and col(loc, instring) != self.col:
+                loc += 1
+        return loc
+
+    def parseImpl(self, instring, loc, doActions=True):
+        thiscol = col(loc, instring)
+        if thiscol > self.col:
+            raise ParseException(instring, loc, "Text not in expected column", self)
+        newloc = loc + self.col - thiscol
+        ret = instring[loc: newloc]
+        return newloc, ret
+
+
+class LineStart(_PositionToken):
+    r"""Matches if current position is at the beginning of a line within
+    the parse string.
+
+    Example::
+
+        test = '''\
+        AAA this line
+        AAA and this line
+          AAA but not this one
+        B AAA and definitely not this one
+        '''
+
+        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
+            print(t)
+
+    prints::
+
+        ['AAA', ' this line']
+        ['AAA', ' and this line']
+
+    """
+    def __init__(self):
+        super(LineStart, self).__init__()
+        self.errmsg = "Expected start of line"
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if col(loc, instring) == 1:
+            return loc, []
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class LineEnd(_PositionToken):
+    """Matches if current position is at the end of a line within the
+    parse string.
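+
+    Example (a minimal sketch)::
+
+        # require a number to be the last token on its line
+        expr = Word(nums) + LineEnd().suppress()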
+    """
+    def __init__(self):
+        super(LineEnd, self).__init__()
+        self.setWhitespaceChars(ParserElement.DEFAULT_WHITE_CHARS.replace("\n", ""))
+        self.errmsg = "Expected end of line"
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if loc < len(instring):
+            if instring[loc] == "\n":
+                return loc + 1, "\n"
+            else:
+                raise ParseException(instring, loc, self.errmsg, self)
+        elif loc == len(instring):
+            return loc + 1, []
+        else:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+class StringStart(_PositionToken):
+    """Matches if current position is at the beginning of the parse
+    string.
+    """
+    def __init__(self):
+        super(StringStart, self).__init__()
+        self.errmsg = "Expected start of text"
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if loc != 0:
+            # see if entire string up to here is just whitespace and ignorables
+            if loc != self.preParse(instring, 0):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+class StringEnd(_PositionToken):
+    """Matches if current position is at the end of the parse string.
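+
+    Example (a minimal sketch)::
+
+        # the entire input must be a single integer
+        expr = Word(nums) + StringEnd()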
+    """
+    def __init__(self):
+        super(StringEnd, self).__init__()
+        self.errmsg = "Expected end of text"
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if loc < len(instring):
+            raise ParseException(instring, loc, self.errmsg, self)
+        elif loc == len(instring):
+            return loc + 1, []
+        elif loc > len(instring):
+            return loc, []
+        else:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+class WordStart(_PositionToken):
+    r"""Matches if the current position is at the beginning of a Word,
+    and is not preceded by any character in a given set of
+    ``wordChars`` (default= ``printables``). To emulate the
+    ``\b`` behavior of regular expressions, use
+    ``WordStart(alphanums)``. ``WordStart`` will also match at
+    the beginning of the string being parsed, or at the beginning of
+    a line.
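+
+    Example (a minimal sketch)::
+
+        # match 'over' only when it begins a word
+        expr = WordStart(alphanums) + Literal("over")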
+    """
+    def __init__(self, wordChars=printables):
+        super(WordStart, self).__init__()
+        self.wordChars = set(wordChars)
+        self.errmsg = "Not at the start of a word"
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if loc != 0:
+            if (instring[loc - 1] in self.wordChars
+                    or instring[loc] not in self.wordChars):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+class WordEnd(_PositionToken):
+    r"""Matches if the current position is at the end of a Word, and is
+    not followed by any character in a given set of ``wordChars``
+    (default= ``printables``). To emulate the ``\b`` behavior of
+    regular expressions, use ``WordEnd(alphanums)``. ``WordEnd``
+    will also match at the end of the string being parsed, or at the end
+    of a line.
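+
+    Example (a minimal sketch)::
+
+        # match 'ing' only when it ends a word
+        expr = Literal("ing") + WordEnd(alphanums)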
+    """
+    def __init__(self, wordChars=printables):
+        super(WordEnd, self).__init__()
+        self.wordChars = set(wordChars)
+        self.skipWhitespace = False
+        self.errmsg = "Not at the end of a word"
+
+    def parseImpl(self, instring, loc, doActions=True):
+        instrlen = len(instring)
+        if instrlen > 0 and loc < instrlen:
+            if (instring[loc] in self.wordChars or
+                    instring[loc - 1] not in self.wordChars):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+
+class ParseExpression(ParserElement):
+    """Abstract subclass of ParserElement, for combining and
+    post-processing parsed tokens.
+    """
+    def __init__(self, exprs, savelist=False):
+        super(ParseExpression, self).__init__(savelist)
+        if isinstance(exprs, _generatorType):
+            exprs = list(exprs)
+
+        if isinstance(exprs, basestring):
+            self.exprs = [self._literalStringClass(exprs)]
+        elif isinstance(exprs, ParserElement):
+            self.exprs = [exprs]
+        elif isinstance(exprs, Iterable):
+            exprs = list(exprs)
+            # if sequence of strings provided, wrap with Literal
+            if any(isinstance(expr, basestring) for expr in exprs):
+                exprs = (self._literalStringClass(e) if isinstance(e, basestring) else e for e in exprs)
+            self.exprs = list(exprs)
+        else:
+            try:
+                self.exprs = list(exprs)
+            except TypeError:
+                self.exprs = [exprs]
+        self.callPreparse = False
+
+    def append(self, other):
+        self.exprs.append(other)
+        self.strRepr = None
+        return self
+
+    def leaveWhitespace(self):
+        """Extends ``leaveWhitespace`` defined in base class, and also invokes ``leaveWhitespace`` on
+           all contained expressions."""
+        self.skipWhitespace = False
+        self.exprs = [e.copy() for e in self.exprs]
+        for e in self.exprs:
+            e.leaveWhitespace()
+        return self
+
+    def ignore(self, other):
+        if isinstance(other, Suppress):
+            if other not in self.ignoreExprs:
+                super(ParseExpression, self).ignore(other)
+                for e in self.exprs:
+                    e.ignore(self.ignoreExprs[-1])
+        else:
+            super(ParseExpression, self).ignore(other)
+            for e in self.exprs:
+                e.ignore(self.ignoreExprs[-1])
+        return self
+
+    def __str__(self):
+        try:
+            return super(ParseExpression, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.exprs))
+        return self.strRepr
+
+    def streamline(self):
+        super(ParseExpression, self).streamline()
+
+        for e in self.exprs:
+            e.streamline()
+
+        # collapse nested And's of the form And(And(And(a, b), c), d) to And(a, b, c, d)
+        # but only if there are no parse actions or resultsNames on the nested And's
+        # (likewise for Or's and MatchFirst's)
+        if len(self.exprs) == 2:
+            other = self.exprs[0]
+            if (isinstance(other, self.__class__)
+                    and not other.parseAction
+                    and other.resultsName is None
+                    and not other.debug):
+                self.exprs = other.exprs[:] + [self.exprs[1]]
+                self.strRepr = None
+                self.mayReturnEmpty |= other.mayReturnEmpty
+                self.mayIndexError  |= other.mayIndexError
+
+            other = self.exprs[-1]
+            if (isinstance(other, self.__class__)
+                    and not other.parseAction
+                    and other.resultsName is None
+                    and not other.debug):
+                self.exprs = self.exprs[:-1] + other.exprs[:]
+                self.strRepr = None
+                self.mayReturnEmpty |= other.mayReturnEmpty
+                self.mayIndexError  |= other.mayIndexError
+
+        self.errmsg = "Expected " + _ustr(self)
+
+        return self
+
+    def validate(self, validateTrace=None):
+        tmp = (validateTrace if validateTrace is not None else [])[:] + [self]
+        for e in self.exprs:
+            e.validate(tmp)
+        self.checkRecursion([])
+
+    def copy(self):
+        ret = super(ParseExpression, self).copy()
+        ret.exprs = [e.copy() for e in self.exprs]
+        return ret
+
+    def _setResultsName(self, name, listAllMatches=False):
+        if __diag__.warn_ungrouped_named_tokens_in_collection:
+            for e in self.exprs:
+                if isinstance(e, ParserElement) and e.resultsName:
+                    warnings.warn("{0}: setting results name {1!r} on {2} expression "
+                                  "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection",
+                                                                                       name,
+                                                                                       type(self).__name__,
+                                                                                       e.resultsName),
+                                  stacklevel=3)
+
+        return super(ParseExpression, self)._setResultsName(name, listAllMatches)
+
+
+class And(ParseExpression):
+    """
+    Requires all given :class:`ParseExpression` s to be found in the given order.
+    Expressions may be separated by whitespace.
+    May be constructed using the ``'+'`` operator.
+    May also be constructed using the ``'-'`` operator, which will
+    suppress backtracking.
+
+    Example::
+
+        integer = Word(nums)
+        name_expr = OneOrMore(Word(alphas))
+
+        expr = And([integer("id"), name_expr("name"), integer("age")])
+        # more easily written as:
+        expr = integer("id") + name_expr("name") + integer("age")
+    """
+
+    class _ErrorStop(Empty):
+        def __init__(self, *args, **kwargs):
+            super(And._ErrorStop, self).__init__(*args, **kwargs)
+            self.name = '-'
+            self.leaveWhitespace()
+
+    def __init__(self, exprs, savelist=True):
+        exprs = list(exprs)
+        if exprs and Ellipsis in exprs:
+            tmp = []
+            for i, expr in enumerate(exprs):
+                if expr is Ellipsis:
+                    if i < len(exprs) - 1:
+                        skipto_arg = (Empty() + exprs[i + 1]).exprs[-1]
+                        tmp.append(SkipTo(skipto_arg)("_skipped*"))
+                    else:
+                        raise Exception("cannot construct And with sequence ending in ...")
+                else:
+                    tmp.append(expr)
+            exprs[:] = tmp
+        super(And, self).__init__(exprs, savelist)
+        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+        self.setWhitespaceChars(self.exprs[0].whiteChars)
+        self.skipWhitespace = self.exprs[0].skipWhitespace
+        self.callPreparse = True
+
+    def streamline(self):
+        # collapse any _PendingSkip's
+        if self.exprs:
+            if any(isinstance(e, ParseExpression) and e.exprs and isinstance(e.exprs[-1], _PendingSkip)
+                   for e in self.exprs[:-1]):
+                for i, e in enumerate(self.exprs[:-1]):
+                    if e is None:
+                        continue
+                    if (isinstance(e, ParseExpression)
+                            and e.exprs and isinstance(e.exprs[-1], _PendingSkip)):
+                        e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1]
+                        self.exprs[i + 1] = None
+                self.exprs = [e for e in self.exprs if e is not None]
+
+        super(And, self).streamline()
+        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+        return self
+
+    def parseImpl(self, instring, loc, doActions=True):
+        # pass False as last arg to _parse for first element, since we already
+        # pre-parsed the string as part of our And pre-parsing
+        loc, resultlist = self.exprs[0]._parse(instring, loc, doActions, callPreParse=False)
+        errorStop = False
+        for e in self.exprs[1:]:
+            if isinstance(e, And._ErrorStop):
+                errorStop = True
+                continue
+            if errorStop:
+                try:
+                    loc, exprtokens = e._parse(instring, loc, doActions)
+                except ParseSyntaxException:
+                    raise
+                except ParseBaseException as pe:
+                    pe.__traceback__ = None
+                    raise ParseSyntaxException._from_exception(pe)
+                except IndexError:
+                    raise ParseSyntaxException(instring, len(instring), self.errmsg, self)
+            else:
+                loc, exprtokens = e._parse(instring, loc, doActions)
+            if exprtokens or exprtokens.haskeys():
+                resultlist += exprtokens
+        return loc, resultlist
+
+    def __iadd__(self, other):
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        return self.append(other)  # And([self, other])
+
+    def checkRecursion(self, parseElementList):
+        subRecCheckList = parseElementList[:] + [self]
+        for e in self.exprs:
+            e.checkRecursion(subRecCheckList)
+            if not e.mayReturnEmpty:
+                break
+
+    def __str__(self):
+        if hasattr(self, "name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+
+class Or(ParseExpression):
+    """Requires that at least one :class:`ParseExpression` is found. If
+    two expressions match, the expression that matches the longest
+    string will be used. May be constructed using the ``'^'``
+    operator.
+
+    Example::
+
+        # construct Or using '^' operator
+
+        number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
+        print(number.searchString("123 3.1416 789"))
+
+    prints::
+
+        [['123'], ['3.1416'], ['789']]
+    """
+    def __init__(self, exprs, savelist=False):
+        super(Or, self).__init__(exprs, savelist)
+        if self.exprs:
+            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+        else:
+            self.mayReturnEmpty = True
+
+    def streamline(self):
+        super(Or, self).streamline()
+        if __compat__.collect_all_And_tokens:
+            self.saveAsList = any(e.saveAsList for e in self.exprs)
+        return self
+
+    def parseImpl(self, instring, loc, doActions=True):
+        maxExcLoc = -1
+        maxException = None
+        matches = []
+        for e in self.exprs:
+            try:
+                loc2 = e.tryParse(instring, loc)
+            except ParseException as err:
+                err.__traceback__ = None
+                if err.loc > maxExcLoc:
+                    maxException = err
+                    maxExcLoc = err.loc
+            except IndexError:
+                if len(instring) > maxExcLoc:
+                    maxException = ParseException(instring, len(instring), e.errmsg, self)
+                    maxExcLoc = len(instring)
+            else:
+                # save match among all matches, to retry longest to shortest
+                matches.append((loc2, e))
+
+        if matches:
+            # re-evaluate all matches in descending order of match length, in case
+            # attached actions might change whether, or how much of, the input they match
+            matches.sort(key=itemgetter(0), reverse=True)
+
+            if not doActions:
+                # no further conditions or parse actions to change the selection of
+                # alternative, so the first match will be the best match
+                best_expr = matches[0][1]
+                return best_expr._parse(instring, loc, doActions)
+
+            longest = -1, None
+            for loc1, expr1 in matches:
+                if loc1 <= longest[0]:
+                    # already have a longer match than this one will deliver, we are done
+                    return longest
+
+                try:
+                    loc2, toks = expr1._parse(instring, loc, doActions)
+                except ParseException as err:
+                    err.__traceback__ = None
+                    if err.loc > maxExcLoc:
+                        maxException = err
+                        maxExcLoc = err.loc
+                else:
+                    if loc2 >= loc1:
+                        return loc2, toks
+                    # didn't match as much as before
+                    elif loc2 > longest[0]:
+                        longest = loc2, toks
+
+            if longest != (-1, None):
+                return longest
+
+        if maxException is not None:
+            maxException.msg = self.errmsg
+            raise maxException
+        else:
+            raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+
+    def __ixor__(self, other):
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        return self.append(other)  # Or([self, other])
+
+    def __str__(self):
+        if hasattr(self, "name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion(self, parseElementList):
+        subRecCheckList = parseElementList[:] + [self]
+        for e in self.exprs:
+            e.checkRecursion(subRecCheckList)
+
+    def _setResultsName(self, name, listAllMatches=False):
+        if (not __compat__.collect_all_And_tokens
+                and __diag__.warn_multiple_tokens_in_named_alternation):
+            if any(isinstance(e, And) for e in self.exprs):
+                warnings.warn("{0}: setting results name {1!r} on {2} expression "
+                              "may only return a single token for an And alternative, "
+                              "in future will return the full list of tokens".format(
+                    "warn_multiple_tokens_in_named_alternation", name, type(self).__name__),
+                    stacklevel=3)
+
+        return super(Or, self)._setResultsName(name, listAllMatches)
+
+
+class MatchFirst(ParseExpression):
+    """Requires that at least one :class:`ParseExpression` is found. If
+    two expressions match, the first one listed is the one that will
+    match. May be constructed using the ``'|'`` operator.
+
+    Example::
+
+        # construct MatchFirst using '|' operator
+
+        # watch the order of expressions to match
+        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
+        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]
+
+        # put more selective expression first
+        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
+        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
+    """
+    def __init__(self, exprs, savelist=False):
+        super(MatchFirst, self).__init__(exprs, savelist)
+        if self.exprs:
+            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+        else:
+            self.mayReturnEmpty = True
+
+    def streamline(self):
+        super(MatchFirst, self).streamline()
+        if __compat__.collect_all_And_tokens:
+            self.saveAsList = any(e.saveAsList for e in self.exprs)
+        return self
+
+    def parseImpl(self, instring, loc, doActions=True):
+        maxExcLoc = -1
+        maxException = None
+        for e in self.exprs:
+            try:
+                ret = e._parse(instring, loc, doActions)
+                return ret
+            except ParseException as err:
+                if err.loc > maxExcLoc:
+                    maxException = err
+                    maxExcLoc = err.loc
+            except IndexError:
+                if len(instring) > maxExcLoc:
+                    maxException = ParseException(instring, len(instring), e.errmsg, self)
+                    maxExcLoc = len(instring)
+
+        # only got here if no expression matched, raise exception for match that made it the furthest
+        else:
+            if maxException is not None:
+                maxException.msg = self.errmsg
+                raise maxException
+            else:
+                raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+    def __ior__(self, other):
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        return self.append(other)  # MatchFirst([self, other])
+
+    def __str__(self):
+        if hasattr(self, "name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion(self, parseElementList):
+        subRecCheckList = parseElementList[:] + [self]
+        for e in self.exprs:
+            e.checkRecursion(subRecCheckList)
+
+    def _setResultsName(self, name, listAllMatches=False):
+        if (not __compat__.collect_all_And_tokens
+                and __diag__.warn_multiple_tokens_in_named_alternation):
+            if any(isinstance(e, And) for e in self.exprs):
+                warnings.warn("{0}: setting results name {1!r} on {2} expression "
+                              "may only return a single token for an And alternative, "
+                              "in future will return the full list of tokens".format(
+                    "warn_multiple_tokens_in_named_alternation", name, type(self).__name__),
+                    stacklevel=3)
+
+        return super(MatchFirst, self)._setResultsName(name, listAllMatches)
+
+
+class Each(ParseExpression):
+    """Requires all given :class:`ParseExpression` s to be found, but in
+    any order. Expressions may be separated by whitespace.
+
+    May be constructed using the ``'&'`` operator.
+
+    Example::
+
+        color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
+        shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
+        integer = Word(nums)
+        shape_attr = "shape:" + shape_type("shape")
+        posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
+        color_attr = "color:" + color("color")
+        size_attr = "size:" + integer("size")
+
+        # use Each (using operator '&') to accept attributes in any order
+        # (shape and posn are required, color and size are optional)
+        shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)
+
+        shape_spec.runTests('''
+            shape: SQUARE color: BLACK posn: 100, 120
+            shape: CIRCLE size: 50 color: BLUE posn: 50,80
+            color:GREEN size:20 shape:TRIANGLE posn:20,40
+            '''
+            )
+
+    prints::
+
+        shape: SQUARE color: BLACK posn: 100, 120
+        ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
+        - color: BLACK
+        - posn: ['100', ',', '120']
+          - x: 100
+          - y: 120
+        - shape: SQUARE
+
+
+        shape: CIRCLE size: 50 color: BLUE posn: 50,80
+        ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
+        - color: BLUE
+        - posn: ['50', ',', '80']
+          - x: 50
+          - y: 80
+        - shape: CIRCLE
+        - size: 50
+
+
+        color: GREEN size: 20 shape: TRIANGLE posn: 20,40
+        ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
+        - color: GREEN
+        - posn: ['20', ',', '40']
+          - x: 20
+          - y: 40
+        - shape: TRIANGLE
+        - size: 20
+    """
+    def __init__(self, exprs, savelist=True):
+        super(Each, self).__init__(exprs, savelist)
+        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+        self.skipWhitespace = True
+        self.initExprGroups = True
+        self.saveAsList = True
+
+    def streamline(self):
+        super(Each, self).streamline()
+        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+        return self
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if self.initExprGroups:
+            self.opt1map = dict((id(e.expr), e) for e in self.exprs if isinstance(e, Optional))
+            opt1 = [e.expr for e in self.exprs if isinstance(e, Optional)]
+            opt2 = [e for e in self.exprs if e.mayReturnEmpty and not isinstance(e, (Optional, Regex))]
+            self.optionals = opt1 + opt2
+            self.multioptionals = [e.expr for e in self.exprs if isinstance(e, ZeroOrMore)]
+            self.multirequired = [e.expr for e in self.exprs if isinstance(e, OneOrMore)]
+            self.required = [e for e in self.exprs if not isinstance(e, (Optional, ZeroOrMore, OneOrMore))]
+            self.required += self.multirequired
+            self.initExprGroups = False
+        tmpLoc = loc
+        tmpReqd = self.required[:]
+        tmpOpt  = self.optionals[:]
+        matchOrder = []
+
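+        # repeatedly try every remaining expression against the input,
+        # removing required/optional entries as they match, until a full
+        # pass over the pending expressions makes no progress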
+        keepMatching = True
+        while keepMatching:
+            tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
+            failed = []
+            for e in tmpExprs:
+                try:
+                    tmpLoc = e.tryParse(instring, tmpLoc)
+                except ParseException:
+                    failed.append(e)
+                else:
+                    matchOrder.append(self.opt1map.get(id(e), e))
+                    if e in tmpReqd:
+                        tmpReqd.remove(e)
+                    elif e in tmpOpt:
+                        tmpOpt.remove(e)
+            if len(failed) == len(tmpExprs):
+                keepMatching = False
+
+        if tmpReqd:
+            missing = ", ".join(_ustr(e) for e in tmpReqd)
+            raise ParseException(instring, loc, "Missing one or more required elements (%s)" % missing)
+
+        # add any unmatched Optionals, in case they have default values defined
+        matchOrder += [e for e in self.exprs if isinstance(e, Optional) and e.expr in tmpOpt]
+
+        resultlist = []
+        for e in matchOrder:
+            loc, results = e._parse(instring, loc, doActions)
+            resultlist.append(results)
+
+        finalResults = sum(resultlist, ParseResults([]))
+        return loc, finalResults
+
+    def __str__(self):
+        if hasattr(self, "name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion(self, parseElementList):
+        subRecCheckList = parseElementList[:] + [self]
+        for e in self.exprs:
+            e.checkRecursion(subRecCheckList)
+
+
+class ParseElementEnhance(ParserElement):
+    """Abstract subclass of :class:`ParserElement`, for combining and
+    post-processing parsed tokens.
+    """
+    def __init__(self, expr, savelist=False):
+        super(ParseElementEnhance, self).__init__(savelist)
+        if isinstance(expr, basestring):
+            if issubclass(self._literalStringClass, Token):
+                expr = self._literalStringClass(expr)
+            else:
+                expr = self._literalStringClass(Literal(expr))
+        self.expr = expr
+        self.strRepr = None
+        if expr is not None:
+            self.mayIndexError = expr.mayIndexError
+            self.mayReturnEmpty = expr.mayReturnEmpty
+            self.setWhitespaceChars(expr.whiteChars)
+            self.skipWhitespace = expr.skipWhitespace
+            self.saveAsList = expr.saveAsList
+            self.callPreparse = expr.callPreparse
+            self.ignoreExprs.extend(expr.ignoreExprs)
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if self.expr is not None:
+            return self.expr._parse(instring, loc, doActions, callPreParse=False)
+        else:
+            raise ParseException("", loc, self.errmsg, self)
+
+    def leaveWhitespace(self):
+        self.skipWhitespace = False
+        if self.expr is not None:
+            self.expr = self.expr.copy()
+            self.expr.leaveWhitespace()
+        return self
+
+    def ignore(self, other):
+        if isinstance(other, Suppress):
+            if other not in self.ignoreExprs:
+                super(ParseElementEnhance, self).ignore(other)
+                if self.expr is not None:
+                    self.expr.ignore(self.ignoreExprs[-1])
+        else:
+            super(ParseElementEnhance, self).ignore(other)
+            if self.expr is not None:
+                self.expr.ignore(self.ignoreExprs[-1])
+        return self
+
+    def streamline(self):
+        super(ParseElementEnhance, self).streamline()
+        if self.expr is not None:
+            self.expr.streamline()
+        return self
+
+    def checkRecursion(self, parseElementList):
+        if self in parseElementList:
+            raise RecursiveGrammarException(parseElementList + [self])
+        subRecCheckList = parseElementList[:] + [self]
+        if self.expr is not None:
+            self.expr.checkRecursion(subRecCheckList)
+
+    def validate(self, validateTrace=None):
+        if validateTrace is None:
+            validateTrace = []
+        tmp = validateTrace[:] + [self]
+        if self.expr is not None:
+            self.expr.validate(tmp)
+        self.checkRecursion([])
+
+    def __str__(self):
+        try:
+            return super(ParseElementEnhance, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None and self.expr is not None:
+            self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.expr))
+        return self.strRepr
+
+
+class FollowedBy(ParseElementEnhance):
+    """Lookahead matching of the given parse expression.
+    ``FollowedBy`` does *not* advance the parsing position within
+    the input string, it only verifies that the specified parse
+    expression matches at the current position.  ``FollowedBy``
+    always returns a null token list. If any results names are defined
+    in the lookahead expression, those *will* be returned for access by
+    name.
+
+    Example::
+
+        # use FollowedBy to match a label only if it is followed by a ':'
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
+
+    prints::
+
+        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
+    """
+    def __init__(self, expr):
+        super(FollowedBy, self).__init__(expr)
+        self.mayReturnEmpty = True
+
+    def parseImpl(self, instring, loc, doActions=True):
+        # by using self.expr._parse and deleting the contents of the returned ParseResults list
+        # we keep any named results that were defined in the FollowedBy expression
+        _, ret = self.expr._parse(instring, loc, doActions=doActions)
+        del ret[:]
+
+        return loc, ret
+
+
+class PrecededBy(ParseElementEnhance):
+    """Lookbehind matching of the given parse expression.
+    ``PrecededBy`` does not advance the parsing position within the
+    input string, it only verifies that the specified parse expression
+    matches prior to the current position.  ``PrecededBy`` always
+    returns a null token list, but if a results name is defined on the
+    given expression, it is returned.
+
+    Parameters:
+
+     - expr - expression that must match prior to the current parse
+       location
+     - retreat - (default= ``None``) - (int) maximum number of characters
+       to look behind prior to the current parse location
+
+    If the lookbehind expression is a string, Literal, Keyword, or
+    a Word or CharsNotIn with a specified exact or maximum length, then
+    the retreat parameter is not required. Otherwise, retreat must be
+    specified to give a maximum number of characters to look back from
+    the current parse position for a lookbehind match.
+
+    Example::
+
+        # VB-style variable names with type prefixes
+        int_var = PrecededBy("#") + pyparsing_common.identifier
+        str_var = PrecededBy("$") + pyparsing_common.identifier
+
+    """
+    def __init__(self, expr, retreat=None):
+        super(PrecededBy, self).__init__(expr)
+        self.expr = self.expr().leaveWhitespace()
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.exact = False
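+        # when the lookbehind expression has a known length, record it so
+        # parseImpl can test a single exact position instead of scanning a window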
+        if isinstance(expr, str):
+            retreat = len(expr)
+            self.exact = True
+        elif isinstance(expr, (Literal, Keyword)):
+            retreat = expr.matchLen
+            self.exact = True
+        elif isinstance(expr, (Word, CharsNotIn)) and expr.maxLen != _MAX_INT:
+            retreat = expr.maxLen
+            self.exact = True
+        elif isinstance(expr, _PositionToken):
+            retreat = 0
+            self.exact = True
+        self.retreat = retreat
+        self.errmsg = "not preceded by " + str(expr)
+        self.skipWhitespace = False
+        self.parseAction.append(lambda s, l, t: t.__delitem__(slice(None, None)))
+
+    def parseImpl(self, instring, loc=0, doActions=True):
+        if self.exact:
+            if loc < self.retreat:
+                raise ParseException(instring, loc, self.errmsg)
+            start = loc - self.retreat
+            _, ret = self.expr._parse(instring, start)
+        else:
+            # retreat specified a maximum lookbehind window, iterate
+            test_expr = self.expr + StringEnd()
+            instring_slice = instring[max(0, loc - self.retreat):loc]
+            last_expr = ParseException(instring, loc, self.errmsg)
+            for offset in range(1, min(loc, self.retreat + 1)+1):
+                try:
+                    # print('trying', offset, instring_slice, repr(instring_slice[loc - offset:]))
+                    _, ret = test_expr._parse(instring_slice, len(instring_slice) - offset)
+                except ParseBaseException as pbe:
+                    last_expr = pbe
+                else:
+                    break
+            else:
+                raise last_expr
+        return loc, ret
+
+
+class NotAny(ParseElementEnhance):
+    """Lookahead to disallow matching with the given parse expression.
+    ``NotAny`` does *not* advance the parsing position within the
+    input string, it only verifies that the specified parse expression
+    does *not* match at the current position.  Also, ``NotAny`` does
+    *not* skip over leading whitespace. ``NotAny`` always returns
+    a null token list.  May be constructed using the '~' operator.
+
+    Example::
+
+        AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split())
+
+        # take care not to mistake keywords for identifiers
+        ident = ~(AND | OR | NOT) + Word(alphas)
+        boolean_term = Optional(NOT) + ident
+
+        # very crude boolean expression - to support parenthesis groups and
+        # operation hierarchy, use infixNotation
+        boolean_expr = boolean_term + ZeroOrMore((AND | OR) + boolean_term)
+
+        # integers that are followed by "." are actually floats
+        integer = Word(nums) + ~Char(".")
+    """
+    def __init__(self, expr):
+        super(NotAny, self).__init__(expr)
+        # ~ self.leaveWhitespace()
+        self.skipWhitespace = False  # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
+        self.mayReturnEmpty = True
+        self.errmsg = "Found unwanted token, " + _ustr(self.expr)
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if self.expr.canParseNext(instring, loc):
+            raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+    def __str__(self):
+        if hasattr(self, "name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "~{" + _ustr(self.expr) + "}"
+
+        return self.strRepr
+
+class _MultipleMatch(ParseElementEnhance):
+    def __init__(self, expr, stopOn=None):
+        super(_MultipleMatch, self).__init__(expr)
+        self.saveAsList = True
+        ender = stopOn
+        if isinstance(ender, basestring):
+            ender = self._literalStringClass(ender)
+        self.stopOn(ender)
+
+    def stopOn(self, ender):
+        if isinstance(ender, basestring):
+            ender = self._literalStringClass(ender)
+        self.not_ender = ~ender if ender is not None else None
+        return self
+
+    def parseImpl(self, instring, loc, doActions=True):
+        self_expr_parse = self.expr._parse
+        self_skip_ignorables = self._skipIgnorables
+        check_ender = self.not_ender is not None
+        if check_ender:
+            try_not_ender = self.not_ender.tryParse
+
+        # must be at least one (but first see if we are the stopOn sentinel;
+        # if so, fail)
+        if check_ender:
+            try_not_ender(instring, loc)
+        loc, tokens = self_expr_parse(instring, loc, doActions, callPreParse=False)
+        try:
+            hasIgnoreExprs = bool(self.ignoreExprs)
+            while 1:
+                if check_ender:
+                    try_not_ender(instring, loc)
+                if hasIgnoreExprs:
+                    preloc = self_skip_ignorables(instring, loc)
+                else:
+                    preloc = loc
+                loc, tmptokens = self_expr_parse(instring, preloc, doActions)
+                if tmptokens or tmptokens.haskeys():
+                    tokens += tmptokens
+        except (ParseException, IndexError):
+            pass
+
+        return loc, tokens
+
+    def _setResultsName(self, name, listAllMatches=False):
+        if __diag__.warn_ungrouped_named_tokens_in_collection:
+            for e in [self.expr] + getattr(self.expr, 'exprs', []):
+                if isinstance(e, ParserElement) and e.resultsName:
+                    warnings.warn("{0}: setting results name {1!r} on {2} expression "
+                                  "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection",
+                                                                                       name,
+                                                                                       type(self).__name__,
+                                                                                       e.resultsName),
+                                  stacklevel=3)
+
+        return super(_MultipleMatch, self)._setResultsName(name, listAllMatches)
+
+
+class OneOrMore(_MultipleMatch):
+    """Repetition of one or more of the given expression.
+
+    Parameters:
+     - expr - expression that must match one or more times
+     - stopOn - (default= ``None``) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition
+          expression)
+
+    Example::
+
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: BLACK"
+        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]
+
+        # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
+
+        # could also be written as
+        (attr_expr * (1,)).parseString(text).pprint()
+    """
+
+    def __str__(self):
+        if hasattr(self, "name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + _ustr(self.expr) + "}..."
+
+        return self.strRepr
+
+class ZeroOrMore(_MultipleMatch):
+    """Optional repetition of zero or more of the given expression.
+
+    Parameters:
+     - expr - expression that must match zero or more times
+     - stopOn - (default= ``None``) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition
+          expression)
+
+    Example: similar to :class:`OneOrMore`
+    """
+    def __init__(self, expr, stopOn=None):
+        super(ZeroOrMore, self).__init__(expr, stopOn=stopOn)
+        self.mayReturnEmpty = True
+
+    def parseImpl(self, instring, loc, doActions=True):
+        try:
+            return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
+        except (ParseException, IndexError):
+            return loc, []
+
+    def __str__(self):
+        if hasattr(self, "name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]..."
+
+        return self.strRepr
+
+
+class _NullToken(object):
+    def __bool__(self):
+        return False
+    __nonzero__ = __bool__
+    def __str__(self):
+        return ""
+
+class Optional(ParseElementEnhance):
+    """Optional matching of the given expression.
+
+    Parameters:
+     - expr - expression that must match zero or more times
+     - default (optional) - value to be returned if the optional expression is not found.
+
+    Example::
+
+        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
+        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
+        zip.runTests('''
+            # traditional ZIP code
+            12345
+
+            # ZIP+4 form
+            12101-0001
+
+            # invalid ZIP
+            98765-
+            ''')
+
+    prints::
+
+        # traditional ZIP code
+        12345
+        ['12345']
+
+        # ZIP+4 form
+        12101-0001
+        ['12101-0001']
+
+        # invalid ZIP
+        98765-
+             ^
+        FAIL: Expected end of text (at char 5), (line:1, col:6)
+    """
+    __optionalNotMatched = _NullToken()
+
+    def __init__(self, expr, default=__optionalNotMatched):
+        super(Optional, self).__init__(expr, savelist=False)
+        self.saveAsList = self.expr.saveAsList
+        self.defaultValue = default
+        self.mayReturnEmpty = True
+
+    def parseImpl(self, instring, loc, doActions=True):
+        try:
+            loc, tokens = self.expr._parse(instring, loc, doActions, callPreParse=False)
+        except (ParseException, IndexError):
+            if self.defaultValue is not self.__optionalNotMatched:
+                if self.expr.resultsName:
+                    tokens = ParseResults([self.defaultValue])
+                    tokens[self.expr.resultsName] = self.defaultValue
+                else:
+                    tokens = [self.defaultValue]
+            else:
+                tokens = []
+        return loc, tokens
+
+    def __str__(self):
+        if hasattr(self, "name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]"
+
+        return self.strRepr
+
+class SkipTo(ParseElementEnhance):
+    """Token for skipping over all undefined text until the matched
+    expression is found.
+
+    Parameters:
+     - expr - target expression marking the end of the data to be skipped
+     - include - (default= ``False``) if True, the target expression is also parsed
+          (the skipped text and target expression are returned as a 2-element list).
+     - ignore - (default= ``None``) used to define grammars (typically quoted strings and
+          comments) that might contain false matches to the target expression
+     - failOn - (default= ``None``) define expressions that are not allowed to be
+          included in the skipped text; if found before the target expression is found,
+          the SkipTo is not a match
+
+    Example::
+
+        report = '''
+            Outstanding Issues Report - 1 Jan 2000
+
+               # | Severity | Description                               |  Days Open
+            -----+----------+-------------------------------------------+-----------
+             101 | Critical | Intermittent system crash                 |          6
+              94 | Cosmetic | Spelling error on Login ('log|n')         |         14
+              79 | Minor    | System slow when running too many reports |         47
+            '''
+        integer = Word(nums)
+        SEP = Suppress('|')
+        # use SkipTo to simply match everything up until the next SEP
+        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
+        # - parse action will call token.strip() for each matched token, i.e., the description body
+        string_data = SkipTo(SEP, ignore=quotedString)
+        string_data.setParseAction(tokenMap(str.strip))
+        ticket_expr = (integer("issue_num") + SEP
+                      + string_data("sev") + SEP
+                      + string_data("desc") + SEP
+                      + integer("days_open"))
+
+        for tkt in ticket_expr.searchString(report):
+            print(tkt.dump())
+
+    prints::
+
+        ['101', 'Critical', 'Intermittent system crash', '6']
+        - days_open: 6
+        - desc: Intermittent system crash
+        - issue_num: 101
+        - sev: Critical
+        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
+        - days_open: 14
+        - desc: Spelling error on Login ('log|n')
+        - issue_num: 94
+        - sev: Cosmetic
+        ['79', 'Minor', 'System slow when running too many reports', '47']
+        - days_open: 47
+        - desc: System slow when running too many reports
+        - issue_num: 79
+        - sev: Minor
+    """
+    def __init__(self, other, include=False, ignore=None, failOn=None):
+        super(SkipTo, self).__init__(other)
+        self.ignoreExpr = ignore
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.includeMatch = include
+        self.saveAsList = False
+        if isinstance(failOn, basestring):
+            self.failOn = self._literalStringClass(failOn)
+        else:
+            self.failOn = failOn
+        self.errmsg = "No match found for " + _ustr(self.expr)
+
+    def parseImpl(self, instring, loc, doActions=True):
+        startloc = loc
+        instrlen = len(instring)
+        expr = self.expr
+        expr_parse = self.expr._parse
+        self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
+        self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
+
+        tmploc = loc
+        while tmploc <= instrlen:
+            if self_failOn_canParseNext is not None:
+                # break if failOn expression matches
+                if self_failOn_canParseNext(instring, tmploc):
+                    break
+
+            if self_ignoreExpr_tryParse is not None:
+                # advance past ignore expressions
+                while 1:
+                    try:
+                        tmploc = self_ignoreExpr_tryParse(instring, tmploc)
+                    except ParseBaseException:
+                        break
+
+            try:
+                expr_parse(instring, tmploc, doActions=False, callPreParse=False)
+            except (ParseException, IndexError):
+                # no match, advance loc in string
+                tmploc += 1
+            else:
+                # matched skipto expr, done
+                break
+
+        else:
+            # ran off the end of the input string without matching skipto expr, fail
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        # build up return values
+        loc = tmploc
+        skiptext = instring[startloc:loc]
+        skipresult = ParseResults(skiptext)
+
+        if self.includeMatch:
+            loc, mat = expr_parse(instring, loc, doActions, callPreParse=False)
+            skipresult += mat
+
+        return loc, skipresult
+
+class Forward(ParseElementEnhance):
+    """Forward declaration of an expression to be defined later -
+    used for recursive grammars, such as algebraic infix notation.
+    When the expression is known, it is assigned to the ``Forward``
+    variable using the '<<' operator.
+
+    Note: take care when assigning to ``Forward`` not to overlook
+    precedence of operators.
+
+    Specifically, '|' has a lower precedence than '<<', so that::
+
+        fwdExpr << a | b | c
+
+    will actually be evaluated as::
+
+        (fwdExpr << a) | b | c
+
+    thereby leaving b and c out as parseable alternatives.  It is recommended that you
+    explicitly group the values inserted into the ``Forward``::
+
+        fwdExpr << (a | b | c)
+
+    Converting to use the '<<=' operator instead will avoid this problem.
+
+    See :class:`ParseResults.pprint` for an example of a recursive
+    parser created using ``Forward``.
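+
+    A minimal sketch of a recursive grammar, using illustrative names::
+
+        LPAR, RPAR = map(Suppress, "()")
+        nested = Forward()
+        nested <<= Group(LPAR + ZeroOrMore(nested | Word(alphas)) + RPAR)
+        print(nested.parseString("(a (b c) d)"))
+
+    prints::
+
+        [['a', ['b', 'c'], 'd']]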
+    """
+    def __init__(self, other=None):
+        super(Forward, self).__init__(other, savelist=False)
+
+    def __lshift__(self, other):
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        self.expr = other
+        self.strRepr = None
+        self.mayIndexError = self.expr.mayIndexError
+        self.mayReturnEmpty = self.expr.mayReturnEmpty
+        self.setWhitespaceChars(self.expr.whiteChars)
+        self.skipWhitespace = self.expr.skipWhitespace
+        self.saveAsList = self.expr.saveAsList
+        self.ignoreExprs.extend(self.expr.ignoreExprs)
+        return self
+
+    def __ilshift__(self, other):
+        return self << other
+
+    def leaveWhitespace(self):
+        self.skipWhitespace = False
+        return self
+
+    def streamline(self):
+        if not self.streamlined:
+            self.streamlined = True
+            if self.expr is not None:
+                self.expr.streamline()
+        return self
+
+    def validate(self, validateTrace=None):
+        if validateTrace is None:
+            validateTrace = []
+
+        if self not in validateTrace:
+            tmp = validateTrace[:] + [self]
+            if self.expr is not None:
+                self.expr.validate(tmp)
+        self.checkRecursion([])
+
+    def __str__(self):
+        if hasattr(self, "name"):
+            return self.name
+        if self.strRepr is not None:
+            return self.strRepr
+
+        # Avoid infinite recursion by setting a temporary strRepr
+        self.strRepr = ": ..."
+
+        # Use the string representation of main expression.
+        retString = '...'
+        try:
+            if self.expr is not None:
+                retString = _ustr(self.expr)[:1000]
+            else:
+                retString = "None"
+        finally:
+            self.strRepr = self.__class__.__name__ + ": " + retString
+        return self.strRepr
+
+    def copy(self):
+        if self.expr is not None:
+            return super(Forward, self).copy()
+        else:
+            ret = Forward()
+            ret <<= self
+            return ret
+
+    def _setResultsName(self, name, listAllMatches=False):
+        if __diag__.warn_name_set_on_empty_Forward:
+            if self.expr is None:
+                warnings.warn("{0}: setting results name {0!r} on {1} expression "
+                              "that has no contained expression".format("warn_name_set_on_empty_Forward",
+                                                                        name,
+                                                                        type(self).__name__),
+                              stacklevel=3)
+
+        return super(Forward, self)._setResultsName(name, listAllMatches)
+
+class TokenConverter(ParseElementEnhance):
+    """
+    Abstract subclass of :class:`ParseExpression`, for converting parsed results.
+    """
+    def __init__(self, expr, savelist=False):
+        super(TokenConverter, self).__init__(expr)  # , savelist)
+        self.saveAsList = False
+
+class Combine(TokenConverter):
+    """Converter to concatenate all matching tokens to a single string.
+    By default, the matching patterns must also be contiguous in the
+    input string; this can be disabled by specifying
+    ``'adjacent=False'`` in the constructor.
+
+    Example::
+
+        real = Word(nums) + '.' + Word(nums)
+        print(real.parseString('3.1416')) # -> ['3', '.', '1416']
+        # will also erroneously match the following
+        print(real.parseString('3. 1416')) # -> ['3', '.', '1416']
+
+        real = Combine(Word(nums) + '.' + Word(nums))
+        print(real.parseString('3.1416')) # -> ['3.1416']
+        # no match when there are internal spaces
+        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
+    """
+    def __init__(self, expr, joinString="", adjacent=True):
+        super(Combine, self).__init__(expr)
+        # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
+        if adjacent:
+            self.leaveWhitespace()
+        self.adjacent = adjacent
+        self.skipWhitespace = True
+        self.joinString = joinString
+        self.callPreparse = True
+
+    def ignore(self, other):
+        if self.adjacent:
+            ParserElement.ignore(self, other)
+        else:
+            super(Combine, self).ignore(other)
+        return self
+
+    def postParse(self, instring, loc, tokenlist):
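+        # replace the individually matched tokens with a single string of their
+        # joined text, preserving any named results that were attached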
+        retToks = tokenlist.copy()
+        del retToks[:]
+        retToks += ParseResults(["".join(tokenlist._asStringList(self.joinString))], modal=self.modalResults)
+
+        if self.resultsName and retToks.haskeys():
+            return [retToks]
+        else:
+            return retToks
+
+class Group(TokenConverter):
+    """Converter to return the matched tokens as a list - useful for
+    returning tokens of :class:`ZeroOrMore` and :class:`OneOrMore` expressions.
+
+    Example::
+
+        ident = Word(alphas)
+        num = Word(nums)
+        term = ident | num
+        func = ident + Optional(delimitedList(term))
+        print(func.parseString("fn a, b, 100"))  # -> ['fn', 'a', 'b', '100']
+
+        func = ident + Group(Optional(delimitedList(term)))
+        print(func.parseString("fn a, b, 100"))  # -> ['fn', ['a', 'b', '100']]
+    """
+    def __init__(self, expr):
+        super(Group, self).__init__(expr)
+        self.saveAsList = True
+
+    def postParse(self, instring, loc, tokenlist):
+        return [tokenlist]
+
+class Dict(TokenConverter):
+    """Converter to return a repetitive expression as a list, but also
+    as a dictionary. Each element can also be referenced using the first
+    token in the expression as its key. Useful for tabular report
+    scraping when the first column can be used as an item key.
+
+    Example::
+
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+        # print attributes as plain groups
+        print(OneOrMore(attr_expr).parseString(text).dump())
+
+        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
+        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
+        print(result.dump())
+
+        # access named fields as dict entries, or output as dict
+        print(result['shape'])
+        print(result.asDict())
+
+    prints::
+
+        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
+
+    See more examples at :class:`ParseResults` of accessing fields by results name.
+    """
+    def __init__(self, expr):
+        super(Dict, self).__init__(expr)
+        self.saveAsList = True
+
+    def postParse(self, instring, loc, tokenlist):
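+        # use the first token of each sub-list as the key; a single remaining
+        # token is stored as a scalar value, anything more as nested results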
+        for i, tok in enumerate(tokenlist):
+            if len(tok) == 0:
+                continue
+            ikey = tok[0]
+            if isinstance(ikey, int):
+                ikey = _ustr(tok[0]).strip()
+            if len(tok) == 1:
+                tokenlist[ikey] = _ParseResultsWithOffset("", i)
+            elif len(tok) == 2 and not isinstance(tok[1], ParseResults):
+                tokenlist[ikey] = _ParseResultsWithOffset(tok[1], i)
+            else:
+                dictvalue = tok.copy()  # ParseResults(i)
+                del dictvalue[0]
+                if len(dictvalue) != 1 or (isinstance(dictvalue, ParseResults) and dictvalue.haskeys()):
+                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue, i)
+                else:
+                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0], i)
+
+        if self.resultsName:
+            return [tokenlist]
+        else:
+            return tokenlist
+
+
+class Suppress(TokenConverter):
+    """Converter for ignoring the results of a parsed expression.
+
+    Example::
+
+        source = "a, b, c,d"
+        wd = Word(alphas)
+        wd_list1 = wd + ZeroOrMore(',' + wd)
+        print(wd_list1.parseString(source))
+
+        # often, delimiters that are useful during parsing are just in the
+        # way afterward - use Suppress to keep them out of the parsed output
+        wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
+        print(wd_list2.parseString(source))
+
+    prints::
+
+        ['a', ',', 'b', ',', 'c', ',', 'd']
+        ['a', 'b', 'c', 'd']
+
+    (See also :class:`delimitedList`.)
+    """
+    def postParse(self, instring, loc, tokenlist):
+        return []
+
+    def suppress(self):
+        return self
+
+
+class OnlyOnce(object):
+    """Wrapper for parse actions, to ensure they are only called once.
+    """
+    def __init__(self, methodCall):
+        self.callable = _trim_arity(methodCall)
+        self.called = False
+    def __call__(self, s, l, t):
+        if not self.called:
+            results = self.callable(s, l, t)
+            self.called = True
+            return results
+        raise ParseException(s, l, "")
+    def reset(self):
+        self.called = False
+
+def traceParseAction(f):
+    """Decorator for debugging parse actions.
+
+    When the parse action is called, this decorator will print
+    ``">> entering method-name(line:<current_source_line>, <parse_location>, <matched_tokens>)"``.
+    When the parse action completes, the decorator will print
+    ``"<<"`` followed by the returned value, or any exception that the parse action raised.
+
+    Example::
+
+        wd = Word(alphas)
+
+        @traceParseAction
+        def remove_duplicate_chars(tokens):
+            return ''.join(sorted(set(''.join(tokens))))
+
+        wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
+        print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
+
+    prints::
+
+        >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
+        <<leaving remove_duplicate_chars (ret: 'dfjkls')
+        ['dfjkls']
+    """
+    f = _trim_arity(f)
+    def z(*paArgs):
+        thisFunc = f.__name__
+        s, l, t = paArgs[-3:]
+        if len(paArgs) > 3:
+            thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
+        sys.stderr.write(">>entering %s(line: '%s', %d, %r)\n" % (thisFunc, line(l, s), l, t))
+        try:
+            ret = f(*paArgs)
+        except Exception as exc:
+            sys.stderr.write("<<leaving %s (exception: %s)\n" % (thisFunc, exc))
+            raise
+        sys.stderr.write("<<leaving %s (ret: %r)\n" % (thisFunc, ret))
+        return ret
+    try:
+        z.__name__ = f.__name__
+    except AttributeError:
+        pass
+    return z
+
+#
+# global helpers
+#
+def delimitedList(expr, delim=",", combine=False):
+    """Helper to define a delimited list of expressions - the delimiter
+    defaults to ','. By default, the list elements and delimiters can
+    have intervening whitespace and comments, but this can be
+    overridden by passing ``combine=True``. If
+    ``combine`` is set to ``True``, the matching tokens are
+    returned as a single token string, with the delimiters included;
+    otherwise, the matching tokens are returned as a list of tokens,
+    with the delimiters suppressed.
+
+    Example::
+
+        delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
+        delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
+    """
+    dlName = _ustr(expr) + " [" + _ustr(delim) + " " + _ustr(expr) + "]..."
+    if combine:
+        return Combine(expr + ZeroOrMore(delim + expr)).setName(dlName)
+    else:
+        return (expr + ZeroOrMore(Suppress(delim) + expr)).setName(dlName)
+
+def countedArray(expr, intExpr=None):
+    """Helper to define a counted list of expressions.
+
+    This helper defines a pattern of the form::
+
+        integer expr expr expr...
+
+    where the leading integer tells how many expr expressions follow.
+    The matched tokens are returned as a list of expr tokens - the
+    leading count token is suppressed.
+
+    If ``intExpr`` is specified, it should be a pyparsing expression
+    that produces an integer value.
+
+    Example::
+
+        countedArray(Word(alphas)).parseString('2 ab cd ef')  # -> ['ab', 'cd']
+
+        # in this parser, the leading integer value is given in binary,
+        # '10' indicating that 2 values are in the array
+        binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
+        countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef')  # -> ['ab', 'cd']
+    """
+    arrayExpr = Forward()
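+    # the count field's parse action rewrites arrayExpr on the fly so that
+    # exactly n copies of expr are expected to follow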
+    def countFieldParseAction(s, l, t):
+        n = t[0]
+        arrayExpr << (n and Group(And([expr] * n)) or Group(empty))
+        return []
+    if intExpr is None:
+        intExpr = Word(nums).setParseAction(lambda t: int(t[0]))
+    else:
+        intExpr = intExpr.copy()
+    intExpr.setName("arrayLen")
+    intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
+    return (intExpr + arrayExpr).setName('(len) ' + _ustr(expr) + '...')
+
+def _flatten(L):
+    ret = []
+    for i in L:
+        if isinstance(i, list):
+            ret.extend(_flatten(i))
+        else:
+            ret.append(i)
+    return ret
+
+def matchPreviousLiteral(expr):
+    """Helper to define an expression that is indirectly defined from
+    the tokens matched in a previous expression, that is, it looks for
+    a 'repeat' of a previous expression.  For example::
+
+        first = Word(nums)
+        second = matchPreviousLiteral(first)
+        matchExpr = first + ":" + second
+
+    will match ``"1:1"``, but not ``"1:2"``.  Because this
+    matches a previous literal, it will also match the leading
+    ``"1:1"`` in ``"1:10"``. If this is not desired, use
+    :class:`matchPreviousExpr`. Do *not* use with packrat parsing
+    enabled.
+    """
+    rep = Forward()
+    def copyTokenToRepeater(s, l, t):
+        if t:
+            if len(t) == 1:
+                rep << t[0]
+            else:
+                # flatten t tokens
+                tflat = _flatten(t.asList())
+                rep << And(Literal(tt) for tt in tflat)
+        else:
+            rep << Empty()
+    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+    rep.setName('(prev) ' + _ustr(expr))
+    return rep
+
+def matchPreviousExpr(expr):
+    """Helper to define an expression that is indirectly defined from
+    the tokens matched in a previous expression, that is, it looks for
+    a 'repeat' of a previous expression.  For example::
+
+        first = Word(nums)
+        second = matchPreviousExpr(first)
+        matchExpr = first + ":" + second
+
+    will match ``"1:1"``, but not ``"1:2"``.  Because this
+    matches by expressions, it will *not* match the leading ``"1:1"``
+    in ``"1:10"``; the expressions are evaluated first, and then
+    compared, so ``"1"`` is compared with ``"10"``. Do *not* use
+    with packrat parsing enabled.
+    """
+    rep = Forward()
+    e2 = expr.copy()
+    rep <<= e2
+    def copyTokenToRepeater(s, l, t):
+        matchTokens = _flatten(t.asList())
+        def mustMatchTheseTokens(s, l, t):
+            theseTokens = _flatten(t.asList())
+            if theseTokens != matchTokens:
+                raise ParseException('', 0, '')
+        rep.setParseAction(mustMatchTheseTokens, callDuringTry=True)
+    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+    rep.setName('(prev) ' + _ustr(expr))
+    return rep
+
+def _escapeRegexRangeChars(s):
+    # ~  escape these chars: ^-[]
+    for c in r"\^-[]":
+        s = s.replace(c, _bslash + c)
+    s = s.replace("\n", r"\n")
+    s = s.replace("\t", r"\t")
+    return _ustr(s)
+
+def oneOf(strs, caseless=False, useRegex=True, asKeyword=False):
+    """Helper to quickly define a set of alternative Literals, and makes
+    sure to do longest-first testing when there is a conflict,
+    regardless of the input order, but returns
+    a :class:`MatchFirst` for best performance.
+
+    Parameters:
+
+     - strs - a string of space-delimited literals, or a collection of
+       string literals
+     - caseless - (default= ``False``) - treat all literals as
+       caseless
+     - useRegex - (default= ``True``) - as an optimization, will
+       generate a Regex object; otherwise, will generate
+       a :class:`MatchFirst` object (if ``caseless=True`` or ``asKeyword=True``, or if
+       creating a :class:`Regex` raises an exception)
+     - asKeyword - (default=``False``) - enforce Keyword-style matching on the
+       generated expressions
+
+    Example::
+
+        comp_oper = oneOf("< = > <= >= !=")
+        var = Word(alphas)
+        number = Word(nums)
+        term = var | number
+        comparison_expr = term + comp_oper + term
+        print(comparison_expr.searchString("B = 12  AA=23 B<=AA AA>12"))
+
+    prints::
+
+        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
+    """
+    if isinstance(caseless, basestring):
+        warnings.warn("More than one string argument passed to oneOf, pass "
+                      "choices as a list or space-delimited string", stacklevel=2)
+
+    if caseless:
+        isequal = (lambda a, b: a.upper() == b.upper())
+        masks = (lambda a, b: b.upper().startswith(a.upper()))
+        parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral
+    else:
+        isequal = (lambda a, b: a == b)
+        masks = (lambda a, b: b.startswith(a))
+        parseElementClass = Keyword if asKeyword else Literal
+
+    symbols = []
+    if isinstance(strs, basestring):
+        symbols = strs.split()
+    elif isinstance(strs, Iterable):
+        symbols = list(strs)
+    else:
+        warnings.warn("Invalid argument to oneOf, expected string or iterable",
+                      SyntaxWarning, stacklevel=2)
+    if not symbols:
+        return NoMatch()
+
+    if not asKeyword:
+        # if not producing keywords, need to reorder to take care to avoid masking
+        # longer choices with shorter ones
+        i = 0
+        while i < len(symbols) - 1:
+            cur = symbols[i]
+            for j, other in enumerate(symbols[i + 1:]):
+                if isequal(other, cur):
+                    del symbols[i + j + 1]
+                    break
+                elif masks(cur, other):
+                    del symbols[i + j + 1]
+                    symbols.insert(i, other)
+                    break
+            else:
+                i += 1
+
+    if not (caseless or asKeyword) and useRegex:
+        # ~ print (strs, "->", "|".join([_escapeRegexChars(sym) for sym in symbols]))
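+        # if every alternative is a single character, a character class ([...])
+        # is more efficient than an alternation (a|b|c)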
+        try:
+            if len(symbols) == len("".join(symbols)):
+                return Regex("[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols)).setName(' | '.join(symbols))
+            else:
+                return Regex("|".join(re.escape(sym) for sym in symbols)).setName(' | '.join(symbols))
+        except Exception:
+            warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
+                    SyntaxWarning, stacklevel=2)
+
+    # last resort, just use MatchFirst
+    return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))
+
+def dictOf(key, value):
+    """Helper to easily and clearly define a dictionary by specifying
+    the respective patterns for the key and value.  Takes care of
+    defining the :class:`Dict`, :class:`ZeroOrMore`, and
+    :class:`Group` tokens in the proper order.  The key pattern
+    can include delimiting markers or punctuation, as long as they are
+    suppressed, thereby leaving the significant key text.  The value
+    pattern can include named results, so that the :class:`Dict` results
+    can include named token fields.
+
+    Example::
+
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        print(OneOrMore(attr_expr).parseString(text).dump())
+
+        attr_label = label
+        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
+
+        # similar to Dict, but simpler call format
+        result = dictOf(attr_label, attr_value).parseString(text)
+        print(result.dump())
+        print(result['shape'])
+        print(result.shape)  # object attribute access works too
+        print(result.asDict())
+
+    prints::
+
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        SQUARE
+        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
+    """
+    return Dict(OneOrMore(Group(key + value)))
+
+def originalTextFor(expr, asString=True):
+    """Helper to return the original, untokenized text for a given
+    expression.  Useful to restore the parsed fields of an HTML start
+    tag into the raw tag text itself, or to revert separate tokens with
+    intervening whitespace back to the original matching input text. By
+    default, returns a string containing the original parsed text.
+
+    If the optional ``asString`` argument is passed as
+    ``False``, then the return value is
+    a :class:`ParseResults` containing any results names that
+    were originally matched, and a single token containing the original
+    matched text from the input string.  So if the expression passed to
+    :class:`originalTextFor` contains expressions with defined
+    results names, you must set ``asString`` to ``False`` if you
+    want to preserve those results name values.
+
+    Example::
+
+        src = "this is test <b> bold <i>text</i> </b> normal text "
+        for tag in ("b", "i"):
+            opener, closer = makeHTMLTags(tag)
+            patt = originalTextFor(opener + SkipTo(closer) + closer)
+            print(patt.searchString(src)[0])
+
+    prints::
+
+        ['<b> bold <i>text</i> </b>']
+        ['<i>text</i>']
+    """
+    locMarker = Empty().setParseAction(lambda s, loc, t: loc)
+    endlocMarker = locMarker.copy()
+    endlocMarker.callPreparse = False
+    matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
+    if asString:
+        extractText = lambda s, l, t: s[t._original_start: t._original_end]
+    else:
+        def extractText(s, l, t):
+            t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
+    matchExpr.setParseAction(extractText)
+    matchExpr.ignoreExprs = expr.ignoreExprs
+    return matchExpr
+
+def ungroup(expr):
+    """Helper to undo pyparsing's default grouping of And expressions,
+    even if all but one are non-empty.
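+
+    Example (an illustrative sketch)::
+
+        grouped = Group(Word(alphas) + Word(nums))
+        print(grouped.parseString("abc 123"))           # -> [['abc', '123']]
+        print(ungroup(grouped).parseString("abc 123"))  # -> ['abc', '123']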
+    """
+    return TokenConverter(expr).addParseAction(lambda t: t[0])
+
+def locatedExpr(expr):
+    """Helper to decorate a returned token with its starting and ending
+    locations in the input string.
+
+    This helper adds the following results names:
+
+     - locn_start = location where matched expression begins
+     - locn_end = location where matched expression ends
+     - value = the actual parsed results
+
+    Be careful if the input text contains ``<TAB>`` characters; you
+    may want to call :class:`ParserElement.parseWithTabs`.
+
+    Example::
+
+        wd = Word(alphas)
+        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
+            print(match)
+
+    prints::
+
+        [[0, 'ljsdf', 5]]
+        [[8, 'lksdjjf', 15]]
+        [[18, 'lkkjj', 23]]
+    """
+    locator = Empty().setParseAction(lambda s, l, t: l)
+    return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
+
+
+# convenience constants for positional expressions
+empty       = Empty().setName("empty")
+lineStart   = LineStart().setName("lineStart")
+lineEnd     = LineEnd().setName("lineEnd")
+stringStart = StringStart().setName("stringStart")
+stringEnd   = StringEnd().setName("stringEnd")
+
+_escapedPunc = Word(_bslash, r"\[]-*.$+^?()~ ", exact=2).setParseAction(lambda s, l, t: t[0][1])
+_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s, l, t: unichr(int(t[0].lstrip(r'\0x'), 16)))
+_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s, l, t: unichr(int(t[0][1:], 8)))
+_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
+_charRange = Group(_singleChar + Suppress("-") + _singleChar)
+_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group(OneOrMore(_charRange | _singleChar)).setResultsName("body") + "]"
+
+def srange(s):
+    r"""Helper to easily define string ranges for use in Word
+    construction. Borrows syntax from regexp '[]' string range
+    definitions::
+
+        srange("[0-9]")   -> "0123456789"
+        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
+        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
+
+    The input string must be enclosed in []'s, and the returned string
+    is the expanded character set joined into a single string. The
+    values enclosed in the []'s may be:
+
+     - a single character
+     - an escaped character with a leading backslash (such as ``\-``
+       or ``\]``)
+     - an escaped hex character with a leading ``'\x'``
+       (``\x21``, which is a ``'!'`` character) (``\0x##``
+       is also supported for backwards compatibility)
+     - an escaped octal character with a leading ``'\0'``
+       (``\041``, which is a ``'!'`` character)
+     - a range of any of the above, separated by a dash (``'a-z'``,
+       etc.)
+     - any combination of the above (``'aeiouy'``,
+       ``'a-zA-Z0-9_$'``, etc.)
+    """
+    _expanded = lambda p: p if not isinstance(p, ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]), ord(p[1]) + 1))
+    try:
+        return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
+    except Exception:
+        return ""
+
+def matchOnlyAtCol(n):
+    """Helper method for defining parse actions that require matching at
+    a specific column in the input text.
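+
+    Example (an illustrative sketch)::
+
+        # accept a word only if it begins in column 6
+        word_in_col6 = Word(alphas).setParseAction(matchOnlyAtCol(6))
+        print(word_in_col6.searchString("one  two three"))  # -> [['two']]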
+    """
+    def verifyCol(strg, locn, toks):
+        if col(locn, strg) != n:
+            raise ParseException(strg, locn, "matched token not at column %d" % n)
+    return verifyCol
+
+def replaceWith(replStr):
+    """Helper method for common parse actions that simply return
+    a literal value.  Especially useful when used with
+    :class:`transformString<ParserElement.transformString>`.
+
+    Example::
+
+        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
+        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
+        term = na | num
+
+        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
+    """
+    return lambda s, l, t: [replStr]
+
+def removeQuotes(s, l, t):
+    """Helper parse action for removing quotation marks from parsed
+    quoted strings.
+
+    Example::
+
+        # by default, quotation marks are included in parsed results
+        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
+
+        # use removeQuotes to strip quotation marks from parsed results
+        quotedString.setParseAction(removeQuotes)
+        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
+    """
+    return t[0][1:-1]
+
+def tokenMap(func, *args):
+    """Helper to define a parse action by mapping a function to all
+    elements of a ParseResults list. If any additional args are passed,
+    they are forwarded to the given function as additional arguments
+    after the token, as in
+    ``hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))``,
+    which will convert the parsed data to an integer using base 16.
+
+    Example (compare the last example to the one in :class:`ParserElement.transformString`)::
+
+        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
+        hex_ints.runTests('''
+            00 11 22 aa FF 0a 0d 1a
+            ''')
+
+        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
+        OneOrMore(upperword).runTests('''
+            my kingdom for a horse
+            ''')
+
+        wd = Word(alphas).setParseAction(tokenMap(str.title))
+        OneOrMore(wd).setParseAction(' '.join).runTests('''
+            now is the winter of our discontent made glorious summer by this sun of york
+            ''')
+
+    prints::
+
+        00 11 22 aa FF 0a 0d 1a
+        [0, 17, 34, 170, 255, 10, 13, 26]
+
+        my kingdom for a horse
+        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
+
+        now is the winter of our discontent made glorious summer by this sun of york
+        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
+    """
+    def pa(s, l, t):
+        return [func(tokn, *args) for tokn in t]
+
+    try:
+        func_name = getattr(func, '__name__',
+                            getattr(func, '__class__').__name__)
+    except Exception:
+        func_name = str(func)
+    pa.__name__ = func_name
+
+    return pa
+
+upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
+"""(Deprecated) Helper parse action to convert tokens to upper case.
+Deprecated in favor of :class:`pyparsing_common.upcaseTokens`"""
+
+downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
+"""(Deprecated) Helper parse action to convert tokens to lower case.
+Deprecated in favor of :class:`pyparsing_common.downcaseTokens`"""
+
+def _makeTags(tagStr, xml,
+              suppress_LT=Suppress("<"),
+              suppress_GT=Suppress(">")):
+    """Internal helper to construct opening and closing tag expressions, given a tag name"""
+    if isinstance(tagStr, basestring):
+        resname = tagStr
+        tagStr = Keyword(tagStr, caseless=not xml)
+    else:
+        resname = tagStr.name
+
+    tagAttrName = Word(alphas, alphanums + "_-:")
+    if xml:
+        tagAttrValue = dblQuotedString.copy().setParseAction(removeQuotes)
+        openTag = (suppress_LT
+                   + tagStr("tag")
+                   + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue)))
+                   + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/')
+                   + suppress_GT)
+    else:
+        tagAttrValue = quotedString.copy().setParseAction(removeQuotes) | Word(printables, excludeChars=">")
+        openTag = (suppress_LT
+                   + tagStr("tag")
+                   + Dict(ZeroOrMore(Group(tagAttrName.setParseAction(downcaseTokens)
+                                           + Optional(Suppress("=") + tagAttrValue))))
+                   + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/')
+                   + suppress_GT)
+    closeTag = Combine(_L("</") + tagStr + ">", adjacent=False)
+
+    openTag.setName("<%s>" % resname)
+    # add start<tagname> results name in parse action now that ungrouped names are not reported at two levels
+    openTag.addParseAction(lambda t: t.__setitem__("start" + "".join(resname.replace(":", " ").title().split()), t.copy()))
+    closeTag = closeTag("end" + "".join(resname.replace(":", " ").title().split())).setName("</%s>" % resname)
+    openTag.tag = resname
+    closeTag.tag = resname
+    openTag.tag_body = SkipTo(closeTag())
+    return openTag, closeTag
+
+def makeHTMLTags(tagStr):
+    """Helper to construct opening and closing tag expressions for HTML,
+    given a tag name. Matches tags in either upper or lower case,
+    attributes with namespaces and with quoted or unquoted values.
+
+    Example::
+
+        text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>'
+        # makeHTMLTags returns pyparsing expressions for the opening and
+        # closing tags as a 2-tuple
+        a, a_end = makeHTMLTags("A")
+        link_expr = a + SkipTo(a_end)("link_text") + a_end
+
+        for link in link_expr.searchString(text):
+            # attributes in the <A> tag (like "href" shown here) are
+            # also accessible as named results
+            print(link.link_text, '->', link.href)
+
+    prints::
+
+        pyparsing -> https://github.com/pyparsing/pyparsing/wiki
+    """
+    return _makeTags(tagStr, False)
+
+def makeXMLTags(tagStr):
+    """Helper to construct opening and closing tag expressions for XML,
+    given a tag name. Matches tags only in the given upper/lower case.
+
+    Example: similar to :class:`makeHTMLTags`
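+
+    For instance (an illustrative sketch)::
+
+        text = '<Book id="1590595939">pyparsing</Book>'
+        book, book_end = makeXMLTags("Book")
+        book_expr = book + SkipTo(book_end)("title") + book_end
+        print(book_expr.parseString(text).title)  # prints: pyparsing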
+    """
+    return _makeTags(tagStr, True)
+
+def withAttribute(*args, **attrDict):
+    """Helper to create a validating parse action to be used with start
+    tags created with :class:`makeXMLTags` or
+    :class:`makeHTMLTags`. Use ``withAttribute`` to qualify
+    a starting tag with a required attribute value, to avoid false
+    matches on common tags such as ``<TD>`` or ``<DIV>``.
+
+    Call ``withAttribute`` with a series of attribute names and
+    values. Specify the list of filter attribute names and values as:
+
+     - keyword arguments, as in ``(align="right")``, or
+     - as an explicit dict with ``**`` operator, when an attribute
+       name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}``
+     - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align", "right"))``
+
+    For attribute names with a namespace prefix, you must use the dict
+    or tuple form.  Attribute names are matched case-insensitively.
+
+    If just testing for ``class`` (with or without a namespace), use
+    :class:`withClass`.
+
+    To verify that the attribute exists, but without specifying a value,
+    pass ``withAttribute.ANY_VALUE`` as the value.
+
+    Example::
+
+        html = '''
+            <div>
+            Some text
+            <div type="grid">1 4 0 1 0</div>
+            <div type="graph">1,3 2,3 1,1</div>
+            <div>this has no type</div>
+            </div>
+
+        '''
+        div,div_end = makeHTMLTags("div")
+
+        # only match div tag having a type attribute with value "grid"
+        div_grid = div().setParseAction(withAttribute(type="grid"))
+        grid_expr = div_grid + SkipTo(div | div_end)("body")
+        for grid_header in grid_expr.searchString(html):
+            print(grid_header.body)
+
+        # construct a match with any div tag having a type attribute, regardless of the value
+        div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
+        div_expr = div_any_type + SkipTo(div | div_end)("body")
+        for div_header in div_expr.searchString(html):
+            print(div_header.body)
+
+    prints::
+
+        1 4 0 1 0
+
+        1 4 0 1 0
+        1,3 2,3 1,1
+    """
+    if args:
+        attrs = args[:]
+    else:
+        attrs = attrDict.items()
+    attrs = [(k, v) for k, v in attrs]
+    def pa(s, l, tokens):
+        for attrName, attrValue in attrs:
+            if attrName not in tokens:
+                raise ParseException(s, l, "no matching attribute " + attrName)
+            if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
+                raise ParseException(s, l, "attribute '%s' has value '%s', must be '%s'" %
+                                            (attrName, tokens[attrName], attrValue))
+    return pa
+withAttribute.ANY_VALUE = object()
+
+def withClass(classname, namespace=''):
+    """Simplified version of :class:`withAttribute` when
+    matching on a div class - made difficult because ``class`` is
+    a reserved word in Python.
+
+    Example::
+
+        html = '''
+            <div>
+            Some text
+            <div class="grid">1 4 0 1 0</div>
+            <div class="graph">1,3 2,3 1,1</div>
+            <div>this &lt;div&gt; has no class</div>
+            </div>
+
+        '''
+        div,div_end = makeHTMLTags("div")
+        div_grid = div().setParseAction(withClass("grid"))
+
+        grid_expr = div_grid + SkipTo(div | div_end)("body")
+        for grid_header in grid_expr.searchString(html):
+            print(grid_header.body)
+
+        div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE))
+        div_expr = div_any_type + SkipTo(div | div_end)("body")
+        for div_header in div_expr.searchString(html):
+            print(div_header.body)
+
+    prints::
+
+        1 4 0 1 0
+
+        1 4 0 1 0
+        1,3 2,3 1,1
+    """
+    classattr = "%s:class" % namespace if namespace else "class"
+    return withAttribute(**{classattr: classname})
+
+opAssoc = SimpleNamespace()
+opAssoc.LEFT = object()
+opAssoc.RIGHT = object()
+
+def infixNotation(baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')')):
+    """Helper method for constructing grammars of expressions made up of
+    operators working in a precedence hierarchy.  Operators may be unary
+    or binary, left- or right-associative.  Parse actions can also be
+    attached to operator expressions. The generated parser will also
+    recognize the use of parentheses to override operator precedences
+    (see example below).
+
+    Note: if you define a deep operator list, you may see performance
+    issues when using infixNotation. See
+    :class:`ParserElement.enablePackrat` for a mechanism to potentially
+    improve your parser performance.
+
+    Parameters:
+     - baseExpr - expression representing the most basic element for the
+       nested operations
+     - opList - list of tuples, one for each operator precedence level
+       in the expression grammar; each tuple is of the form ``(opExpr,
+       numTerms, rightLeftAssoc, parseAction)``, where:
+
+       - opExpr is the pyparsing expression for the operator; may also
+         be a string, which will be converted to a Literal; if numTerms
+         is 3, opExpr is a tuple of two expressions, for the two
+         operators separating the 3 terms
+       - numTerms is the number of terms for this operator (must be 1,
+         2, or 3)
+       - rightLeftAssoc is the indicator whether the operator is right
+         or left associative, using the pyparsing-defined constants
+         ``opAssoc.RIGHT`` and ``opAssoc.LEFT``.
+       - parseAction is the parse action to be associated with
+         expressions matching this operator expression (the parse action
+         tuple member may be omitted); if the parse action is passed
+         a tuple or list of functions, this is equivalent to calling
+         ``setParseAction(*fn)``
+         (:class:`ParserElement.setParseAction`)
+     - lpar - expression for matching left-parentheses
+       (default= ``Suppress('(')``)
+     - rpar - expression for matching right-parentheses
+       (default= ``Suppress(')')``)
+
+    Example::
+
+        # simple example of four-function arithmetic with ints and
+        # variable names
+        integer = pyparsing_common.signed_integer
+        varname = pyparsing_common.identifier
+
+        arith_expr = infixNotation(integer | varname,
+            [
+            ('-', 1, opAssoc.RIGHT),
+            (oneOf('* /'), 2, opAssoc.LEFT),
+            (oneOf('+ -'), 2, opAssoc.LEFT),
+            ])
+
+        arith_expr.runTests('''
+            5+3*6
+            (5+3)*6
+            -2--11
+            ''', fullDump=False)
+
+    prints::
+
+        5+3*6
+        [[5, '+', [3, '*', 6]]]
+
+        (5+3)*6
+        [[[5, '+', 3], '*', 6]]
+
+        -2--11
+        [[['-', 2], '-', ['-', 11]]]
+    """
+    # captive version of FollowedBy that does not do parse actions or capture results names
+    class _FB(FollowedBy):
+        def parseImpl(self, instring, loc, doActions=True):
+            self.expr.tryParse(instring, loc)
+            return loc, []
+
+    ret = Forward()
+    lastExpr = baseExpr | (lpar + ret + rpar)
+    for i, operDef in enumerate(opList):
+        opExpr, arity, rightLeftAssoc, pa = (operDef + (None, ))[:4]
+        termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr
+        if arity == 3:
+            if opExpr is None or len(opExpr) != 2:
+                raise ValueError(
+                    "if numterms=3, opExpr must be a tuple or list of two expressions")
+            opExpr1, opExpr2 = opExpr
+        thisExpr = Forward().setName(termName)
+        if rightLeftAssoc == opAssoc.LEFT:
+            if arity == 1:
+                matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + OneOrMore(opExpr))
+            elif arity == 2:
+                if opExpr is not None:
+                    matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group(lastExpr + OneOrMore(opExpr + lastExpr))
+                else:
+                    matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr + OneOrMore(lastExpr))
+            elif arity == 3:
+                matchExpr = (_FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr)
+                             + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr)))
+            else:
+                raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
+        elif rightLeftAssoc == opAssoc.RIGHT:
+            if arity == 1:
+                # try to avoid LR with this extra test
+                if not isinstance(opExpr, Optional):
+                    opExpr = Optional(opExpr)
+                matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr)
+            elif arity == 2:
+                if opExpr is not None:
+                    matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group(lastExpr + OneOrMore(opExpr + thisExpr))
+                else:
+                    matchExpr = _FB(lastExpr + thisExpr) + Group(lastExpr + OneOrMore(thisExpr))
+            elif arity == 3:
+                matchExpr = (_FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr)
+                             + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr))
+            else:
+                raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
+        else:
+            raise ValueError("operator must indicate right or left associativity")
+        if pa:
+            if isinstance(pa, (tuple, list)):
+                matchExpr.setParseAction(*pa)
+            else:
+                matchExpr.setParseAction(pa)
+        thisExpr <<= (matchExpr.setName(termName) | lastExpr)
+        lastExpr = thisExpr
+    ret <<= lastExpr
+    return ret
+
+operatorPrecedence = infixNotation
+"""(Deprecated) Former name of :class:`infixNotation`, will be
+dropped in a future release."""
+
+dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"').setName("string enclosed in double quotes")
+sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("string enclosed in single quotes")
+quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"'
+                       | Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("quotedString using single or double quotes")
+unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal")
+
+def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):
+    """Helper method for defining nested lists enclosed in opening and
+    closing delimiters ("(" and ")" are the default).
+
+    Parameters:
+     - opener - opening character for a nested list
+       (default= ``"("``); can also be a pyparsing expression
+     - closer - closing character for a nested list
+       (default= ``")"``); can also be a pyparsing expression
+     - content - expression for items within the nested lists
+       (default= ``None``)
+     - ignoreExpr - expression for ignoring opening and closing
+       delimiters (default= :class:`quotedString`)
+
+    If an expression is not provided for the content argument, the
+    nested expression will capture all whitespace-delimited content
+    between delimiters as a list of separate values.
+
+    Use the ``ignoreExpr`` argument to define expressions that may
+    contain opening or closing characters that should not be treated as
+    opening or closing characters for nesting, such as quotedString or
+    a comment expression.  Specify multiple expressions using an
+    :class:`Or` or :class:`MatchFirst`. The default is
+    :class:`quotedString`, but if no expressions are to be ignored, then
+    pass ``None`` for this argument.
+
+    Example::
+
+        data_type = oneOf("void int short long char float double")
+        decl_data_type = Combine(data_type + Optional(Word('*')))
+        ident = Word(alphas+'_', alphanums+'_')
+        number = pyparsing_common.number
+        arg = Group(decl_data_type + ident)
+        LPAR, RPAR = map(Suppress, "()")
+
+        code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment))
+
+        c_function = (decl_data_type("type")
+                      + ident("name")
+                      + LPAR + Optional(delimitedList(arg), [])("args") + RPAR
+                      + code_body("body"))
+        c_function.ignore(cStyleComment)
+
+        source_code = '''
+            int is_odd(int x) {
+                return (x%2);
+            }
+
+            int dec_to_hex(char hchar) {
+                if (hchar >= '0' && hchar <= '9') {
+                    return (ord(hchar)-ord('0'));
+                } else {
+                    return (10+ord(hchar)-ord('A'));
+                }
+            }
+        '''
+        for func in c_function.searchString(source_code):
+            print("%(name)s (%(type)s) args: %(args)s" % func)
+
+
+    prints::
+
+        is_odd (int) args: [['int', 'x']]
+        dec_to_hex (int) args: [['char', 'hchar']]
+    """
+    if opener == closer:
+        raise ValueError("opening and closing strings cannot be the same")
+    if content is None:
+        if isinstance(opener, basestring) and isinstance(closer, basestring):
+            if len(opener) == 1 and len(closer) == 1:
+                if ignoreExpr is not None:
+                    content = (Combine(OneOrMore(~ignoreExpr
+                                                 + CharsNotIn(opener
+                                                              + closer
+                                                              + ParserElement.DEFAULT_WHITE_CHARS, exact=1)
+                                                 )
+                                       ).setParseAction(lambda t: t[0].strip()))
+                else:
+                    content = (empty.copy() + CharsNotIn(opener
+                                                         + closer
+                                                         + ParserElement.DEFAULT_WHITE_CHARS
+                                                         ).setParseAction(lambda t: t[0].strip()))
+            else:
+                if ignoreExpr is not None:
+                    content = (Combine(OneOrMore(~ignoreExpr
+                                                 + ~Literal(opener)
+                                                 + ~Literal(closer)
+                                                 + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1))
+                                       ).setParseAction(lambda t: t[0].strip()))
+                else:
+                    content = (Combine(OneOrMore(~Literal(opener)
+                                                 + ~Literal(closer)
+                                                 + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1))
+                                       ).setParseAction(lambda t: t[0].strip()))
+        else:
+            raise ValueError("opening and closing arguments must be strings if no content expression is given")
+    ret = Forward()
+    if ignoreExpr is not None:
+        ret <<= Group(Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer))
+    else:
+        ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer))
+    ret.setName('nested %s%s expression' % (opener, closer))
+    return ret
+
+def indentedBlock(blockStatementExpr, indentStack, indent=True):
+    """Helper method for defining space-delimited indentation blocks,
+    such as those used to define block statements in Python source code.
+
+    Parameters:
+
+     - blockStatementExpr - expression defining syntax of statement that
+       is repeated within the indented block
+     - indentStack - list created by caller to manage indentation stack
+       (multiple statementWithIndentedBlock expressions within a single
+       grammar should share a common indentStack)
+     - indent - boolean indicating whether block must be indented beyond
+       the current level; set to False for block of left-most
+       statements (default= ``True``)
+
+    A valid block must contain at least one ``blockStatement``.
+
+    Example::
+
+        data = '''
+        def A(z):
+          A1
+          B = 100
+          G = A2
+          A2
+          A3
+        B
+        def BB(a,b,c):
+          BB1
+          def BBA():
+            bba1
+            bba2
+            bba3
+        C
+        D
+        def spam(x,y):
+             def eggs(z):
+                 pass
+        '''
+
+
+        indentStack = [1]
+        stmt = Forward()
+
+        identifier = Word(alphas, alphanums)
+        funcDecl = ("def" + identifier + Group("(" + Optional(delimitedList(identifier)) + ")") + ":")
+        func_body = indentedBlock(stmt, indentStack)
+        funcDef = Group(funcDecl + func_body)
+
+        rvalue = Forward()
+        funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")")
+        rvalue << (funcCall | identifier | Word(nums))
+        assignment = Group(identifier + "=" + rvalue)
+        stmt << (funcDef | assignment | identifier)
+
+        module_body = OneOrMore(stmt)
+
+        parseTree = module_body.parseString(data)
+        parseTree.pprint()
+
+    prints::
+
+        [['def',
+          'A',
+          ['(', 'z', ')'],
+          ':',
+          [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
+         'B',
+         ['def',
+          'BB',
+          ['(', 'a', 'b', 'c', ')'],
+          ':',
+          [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
+         'C',
+         'D',
+         ['def',
+          'spam',
+          ['(', 'x', 'y', ')'],
+          ':',
+          [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]]
+    """
+    backup_stack = indentStack[:]
+
+    def reset_stack():
+        indentStack[:] = backup_stack
+
+    def checkPeerIndent(s, l, t):
+        if l >= len(s): return
+        curCol = col(l, s)
+        if curCol != indentStack[-1]:
+            if curCol > indentStack[-1]:
+                raise ParseException(s, l, "illegal nesting")
+            raise ParseException(s, l, "not a peer entry")
+
+    def checkSubIndent(s, l, t):
+        curCol = col(l, s)
+        if curCol > indentStack[-1]:
+            indentStack.append(curCol)
+        else:
+            raise ParseException(s, l, "not a subentry")
+
+    def checkUnindent(s, l, t):
+        if l >= len(s): return
+        curCol = col(l, s)
+        if not(indentStack and curCol in indentStack):
+            raise ParseException(s, l, "not an unindent")
+        if curCol < indentStack[-1]:
+            indentStack.pop()
+
+    NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress(), stopOn=StringEnd())
+    INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT')
+    PEER   = Empty().setParseAction(checkPeerIndent).setName('')
+    UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT')
+    if indent:
+        smExpr = Group(Optional(NL)
+                       + INDENT
+                       + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd())
+                       + UNDENT)
+    else:
+        smExpr = Group(Optional(NL)
+                       + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd())
+                       + UNDENT)
+    smExpr.setFailAction(lambda a, b, c, d: reset_stack())
+    blockStatementExpr.ignore(_bslash + LineEnd())
+    return smExpr.setName('indented block')
+
+alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
+punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")
+
+anyOpenTag, anyCloseTag = makeHTMLTags(Word(alphas, alphanums + "_:").setName('any tag'))
+_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(), '><& "\''))
+commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity")
+def replaceHTMLEntity(t):
+    """Helper parser action to replace common HTML entities with their special characters"""
+    return _htmlEntityMap.get(t.entity)
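+
+# illustrative usage sketch for replaceHTMLEntity:
+#     commonHTMLEntity.setParseAction(replaceHTMLEntity)
+#     print(commonHTMLEntity.transformString("x &lt; y &amp;&amp; y &gt; z"))
+#     # prints: x < y && y > z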
+
+# it's easy to get these comment structures wrong - they're very common, so may as well make them available
+cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment")
+"Comment of the form ``/* ... */``"
+
+htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment")
+"Comment of the form ``<!-- ... -->``"
+
+restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line")
+dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment")
+"Comment of the form ``// ... (to end of line)``"
+
+cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/' | dblSlashComment).setName("C++ style comment")
+"Comment of either form :class:`cStyleComment` or :class:`dblSlashComment`"
+
+javaStyleComment = cppStyleComment
+"Same as :class:`cppStyleComment`"
+
+pythonStyleComment = Regex(r"#.*").setName("Python style comment")
+"Comment of the form ``# ... (to end of line)``"
+
+_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',')
+                                  + Optional(Word(" \t")
+                                             + ~Literal(",") + ~LineEnd()))).streamline().setName("commaItem")
+commaSeparatedList = delimitedList(Optional(quotedString.copy() | _commasepitem, default="")).setName("commaSeparatedList")
+"""(Deprecated) Predefined expression of 1 or more printable words or
+quoted strings, separated by commas.
+
+This expression is deprecated in favor of :class:`pyparsing_common.comma_separated_list`.
+"""
+
+# some other useful expressions - using lower-case class name since we are really using this as a namespace
+class pyparsing_common:
+    """Here are some common low-level expressions that may be useful in
+    jump-starting parser development:
+
+     - numeric forms (:class:`integers<integer>`, :class:`reals<real>`,
+       :class:`scientific notation<sci_real>`)
+     - common :class:`programming identifiers<identifier>`
+     - network addresses (:class:`MAC<mac_address>`,
+       :class:`IPv4<ipv4_address>`, :class:`IPv6<ipv6_address>`)
+     - ISO8601 :class:`dates<iso8601_date>` and
+       :class:`datetime<iso8601_datetime>`
+     - :class:`UUID<uuid>`
+     - :class:`comma-separated list<comma_separated_list>`
+
+    Parse actions:
+
+     - :class:`convertToInteger`
+     - :class:`convertToFloat`
+     - :class:`convertToDate`
+     - :class:`convertToDatetime`
+     - :class:`stripHTMLTags`
+     - :class:`upcaseTokens`
+     - :class:`downcaseTokens`
+
+    Example::
+
+        pyparsing_common.number.runTests('''
+            # any int or real number, returned as the appropriate type
+            100
+            -100
+            +100
+            3.14159
+            6.02e23
+            1e-12
+            ''')
+
+        pyparsing_common.fnumber.runTests('''
+            # any int or real number, returned as float
+            100
+            -100
+            +100
+            3.14159
+            6.02e23
+            1e-12
+            ''')
+
+        pyparsing_common.hex_integer.runTests('''
+            # hex numbers
+            100
+            FF
+            ''')
+
+        pyparsing_common.fraction.runTests('''
+            # fractions
+            1/2
+            -3/4
+            ''')
+
+        pyparsing_common.mixed_integer.runTests('''
+            # mixed fractions
+            1
+            1/2
+            -3/4
+            1-3/4
+            ''')
+
+        import uuid
+        pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
+        pyparsing_common.uuid.runTests('''
+            # uuid
+            12345678-1234-5678-1234-567812345678
+            ''')
+
+    prints::
+
+        # any int or real number, returned as the appropriate type
+        100
+        [100]
+
+        -100
+        [-100]
+
+        +100
+        [100]
+
+        3.14159
+        [3.14159]
+
+        6.02e23
+        [6.02e+23]
+
+        1e-12
+        [1e-12]
+
+        # any int or real number, returned as float
+        100
+        [100.0]
+
+        -100
+        [-100.0]
+
+        +100
+        [100.0]
+
+        3.14159
+        [3.14159]
+
+        6.02e23
+        [6.02e+23]
+
+        1e-12
+        [1e-12]
+
+        # hex numbers
+        100
+        [256]
+
+        FF
+        [255]
+
+        # fractions
+        1/2
+        [0.5]
+
+        -3/4
+        [-0.75]
+
+        # mixed fractions
+        1
+        [1]
+
+        1/2
+        [0.5]
+
+        -3/4
+        [-0.75]
+
+        1-3/4
+        [1.75]
+
+        # uuid
+        12345678-1234-5678-1234-567812345678
+        [UUID('12345678-1234-5678-1234-567812345678')]
+    """
+
+    convertToInteger = tokenMap(int)
+    """
+    Parse action for converting parsed integers to Python int
+    """
+
+    convertToFloat = tokenMap(float)
+    """
+    Parse action for converting parsed numbers to Python float
+    """
+
+    integer = Word(nums).setName("integer").setParseAction(convertToInteger)
+    """expression that parses an unsigned integer, returns an int"""
+
+    hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int, 16))
+    """expression that parses a hexadecimal integer, returns an int"""
+
+    signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)
+    """expression that parses an integer with optional leading sign, returns an int"""
+
+    fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction")
+    """fractional expression of an integer divided by an integer, returns a float"""
+    fraction.addParseAction(lambda t: t[0]/t[-1])
+
+    mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction")
+    """mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""
+    mixed_integer.addParseAction(sum)
+
+    real = Regex(r'[+-]?(?:\d+\.\d*|\.\d+)').setName("real number").setParseAction(convertToFloat)
+    """expression that parses a floating point number and returns a float"""
+
+    sci_real = Regex(r'[+-]?(?:\d+(?:[eE][+-]?\d+)|(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat)
+    """expression that parses a floating point number with optional
+    scientific notation and returns a float"""
+
+    # streamlining this expression makes the docs nicer-looking
+    number = (sci_real | real | signed_integer).streamline()
+    """any numeric expression, returns the corresponding Python type"""
+
+    fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat)
+    """any int or real number, returned as float"""
+
+    identifier = Word(alphas + '_', alphanums + '_').setName("identifier")
+    """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')"""
+
+    ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address")
+    "IPv4 address (``0.0.0.0 - 255.255.255.255``)"
+
+    _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer")
+    _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part) * 7).setName("full IPv6 address")
+    _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6))
+                           + "::"
+                           + Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6))
+                           ).setName("short IPv6 address")
+    _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8)
+    _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address")
+    ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address")
+    "IPv6 address (long, short, or mixed form)"
+
+    mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address")
+    "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)"
+
+    @staticmethod
+    def convertToDate(fmt="%Y-%m-%d"):
+        """
+        Helper to create a parse action for converting parsed date string to Python datetime.date
+
+        Params -
+         - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``)
+
+        Example::
+
+            date_expr = pyparsing_common.iso8601_date.copy()
+            date_expr.setParseAction(pyparsing_common.convertToDate())
+            print(date_expr.parseString("1999-12-31"))
+
+        prints::
+
+            [datetime.date(1999, 12, 31)]
+        """
+        def cvt_fn(s, l, t):
+            try:
+                return datetime.strptime(t[0], fmt).date()
+            except ValueError as ve:
+                raise ParseException(s, l, str(ve))
+        return cvt_fn
+
+    @staticmethod
+    def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"):
+        """Helper to create a parse action for converting parsed
+        datetime string to Python datetime.datetime
+
+        Params -
+         - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``)
+
+        Example::
+
+            dt_expr = pyparsing_common.iso8601_datetime.copy()
+            dt_expr.setParseAction(pyparsing_common.convertToDatetime())
+            print(dt_expr.parseString("1999-12-31T23:59:59.999"))
+
+        prints::
+
+            [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
+        """
+        def cvt_fn(s, l, t):
+            try:
+                return datetime.strptime(t[0], fmt)
+            except ValueError as ve:
+                raise ParseException(s, l, str(ve))
+        return cvt_fn
+
+    iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date")
+    "ISO8601 date (``yyyy-mm-dd``)"
+
+    iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime")
+    "ISO8601 datetime (``yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)``) - trailing seconds, milliseconds, and timezone optional; accepts separating ``'T'`` or ``' '``"
+
+    uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID")
+    "UUID (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx``)"
+
+    _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress()
+    @staticmethod
+    def stripHTMLTags(s, l, tokens):
+        """Parse action to remove HTML tags from web page HTML source
+
+        Example::
+
+            # strip HTML links from normal text
+            text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>'
+            td, td_end = makeHTMLTags("TD")
+            table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
+            print(table_text.parseString(text).body)
+
+        prints::
+
+            More info at the pyparsing wiki page
+        """
+        return pyparsing_common._html_stripper.transformString(tokens[0])
+
+    _commasepitem = Combine(OneOrMore(~Literal(",")
+                                      + ~LineEnd()
+                                      + Word(printables, excludeChars=',')
+                                      + Optional(White(" \t")))).streamline().setName("commaItem")
+    comma_separated_list = delimitedList(Optional(quotedString.copy()
+                                                  | _commasepitem, default='')
+                                         ).setName("comma separated list")
+    """Predefined expression of 1 or more printable words or quoted strings, separated by commas."""
+
+    upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper()))
+    """Parse action to convert tokens to upper case."""
+
+    downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower()))
+    """Parse action to convert tokens to lower case."""
+
+
+class _lazyclassproperty(object):
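+    # internal descriptor: the wrapped function runs at most once per class and
+    # its result is cached in cls._intern; each subclass gets its own cache so
+    # values computed for a superclass are never shared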
+    def __init__(self, fn):
+        self.fn = fn
+        self.__doc__ = fn.__doc__
+        self.__name__ = fn.__name__
+
+    def __get__(self, obj, cls):
+        if cls is None:
+            cls = type(obj)
+        if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, '_intern', [])
+                                              for superclass in cls.__mro__[1:]):
+            cls._intern = {}
+        attrname = self.fn.__name__
+        if attrname not in cls._intern:
+            cls._intern[attrname] = self.fn(cls)
+        return cls._intern[attrname]
+
+
+class unicode_set(object):
+    """
+    A set of Unicode characters, for language-specific strings for
+    ``alphas``, ``nums``, ``alphanums``, and ``printables``.
+    A unicode_set is defined by a list of ranges in the Unicode character
+    set, in a class attribute ``_ranges``, such as::
+
+        _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),]
+
+    A unicode set can also be defined using multiple inheritance of other unicode sets::
+
+        class CJK(Chinese, Japanese, Korean):
+            pass
+    """
+    _ranges = []
+
+    @classmethod
+    def _get_chars_for_ranges(cls):
+        ret = []
+        for cc in cls.__mro__:
+            if cc is unicode_set:
+                break
+            for rr in cc._ranges:
+                ret.extend(range(rr[0], rr[-1] + 1))
+        return [unichr(c) for c in sorted(set(ret))]
+
+    @_lazyclassproperty
+    def printables(cls):
+        "all non-whitespace characters in this range"
+        return u''.join(filterfalse(unicode.isspace, cls._get_chars_for_ranges()))
+
+    @_lazyclassproperty
+    def alphas(cls):
+        "all alphabetic characters in this range"
+        return u''.join(filter(unicode.isalpha, cls._get_chars_for_ranges()))
+
+    @_lazyclassproperty
+    def nums(cls):
+        "all numeric digit characters in this range"
+        return u''.join(filter(unicode.isdigit, cls._get_chars_for_ranges()))
+
+    @_lazyclassproperty
+    def alphanums(cls):
+        "all alphanumeric characters in this range"
+        return cls.alphas + cls.nums
+
+
+class pyparsing_unicode(unicode_set):
+    """
+    A namespace class for defining common language unicode_sets.
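+
+    Example (an illustrative sketch)::
+
+        # match words written in Greek characters
+        greek_word = Word(pyparsing_unicode.Greek.alphas)
+        print(greek_word.searchString("Μήνιν ἄειδε θεά"))
+        # -> [['Μήνιν'], ['ἄειδε'], ['θεά']]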
+    """
+    _ranges = [(32, sys.maxunicode)]
+
+    class Latin1(unicode_set):
+        "Unicode set for Latin-1 Unicode Character Range"
+        _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),]
+
+    class LatinA(unicode_set):
+        "Unicode set for Latin-A Unicode Character Range"
+        _ranges = [(0x0100, 0x017f),]
+
+    class LatinB(unicode_set):
+        "Unicode set for Latin-B Unicode Character Range"
+        _ranges = [(0x0180, 0x024f),]
+
+    class Greek(unicode_set):
+        "Unicode set for Greek Unicode Character Ranges"
+        _ranges = [
+            (0x0370, 0x03ff), (0x1f00, 0x1f15), (0x1f18, 0x1f1d), (0x1f20, 0x1f45), (0x1f48, 0x1f4d),
+            (0x1f50, 0x1f57), (0x1f59,), (0x1f5b,), (0x1f5d,), (0x1f5f, 0x1f7d), (0x1f80, 0x1fb4), (0x1fb6, 0x1fc4),
+            (0x1fc6, 0x1fd3), (0x1fd6, 0x1fdb), (0x1fdd, 0x1fef), (0x1ff2, 0x1ff4), (0x1ff6, 0x1ffe),
+        ]
+
+    class Cyrillic(unicode_set):
+        "Unicode set for Cyrillic Unicode Character Range"
+        _ranges = [(0x0400, 0x04ff)]
+
+    class Chinese(unicode_set):
+        "Unicode set for Chinese Unicode Character Range"
+        _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f),]
+
+    class Japanese(unicode_set):
+        "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges"
+        _ranges = []
+
+        class Kanji(unicode_set):
+            "Unicode set for Kanji Unicode Character Range"
+            _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f),]
+
+        class Hiragana(unicode_set):
+            "Unicode set for Hiragana Unicode Character Range"
+            _ranges = [(0x3040, 0x309f),]
+
+        class Katakana(unicode_set):
+            "Unicode set for Katakana  Unicode Character Range"
+            _ranges = [(0x30a0, 0x30ff),]
+
+    class Korean(unicode_set):
+        "Unicode set for Korean Unicode Character Range"
+        _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f),]
+
+    class CJK(Chinese, Japanese, Korean):
+        "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range"
+        pass
+
+    class Thai(unicode_set):
+        "Unicode set for Thai Unicode Character Range"
+        _ranges = [(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b),]
+
+    class Arabic(unicode_set):
+        "Unicode set for Arabic Unicode Character Range"
+        _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f),]
+
+    class Hebrew(unicode_set):
+        "Unicode set for Hebrew Unicode Character Range"
+        _ranges = [(0x0590, 0x05ff),]
+
+    class Devanagari(unicode_set):
+        "Unicode set for Devanagari Unicode Character Range"
+        _ranges = [(0x0900, 0x097f), (0xa8e0, 0xa8ff)]
+
+pyparsing_unicode.Japanese._ranges = (pyparsing_unicode.Japanese.Kanji._ranges
+                                      + pyparsing_unicode.Japanese.Hiragana._ranges
+                                      + pyparsing_unicode.Japanese.Katakana._ranges)
+
+# define ranges in language character sets
+if PY_3:
+    setattr(pyparsing_unicode, u"العربية", pyparsing_unicode.Arabic)
+    setattr(pyparsing_unicode, u"中文", pyparsing_unicode.Chinese)
+    setattr(pyparsing_unicode, u"кириллица", pyparsing_unicode.Cyrillic)
+    setattr(pyparsing_unicode, u"Ελληνικά", pyparsing_unicode.Greek)
+    setattr(pyparsing_unicode, u"עִברִית", pyparsing_unicode.Hebrew)
+    setattr(pyparsing_unicode, u"日本語", pyparsing_unicode.Japanese)
+    setattr(pyparsing_unicode.Japanese, u"漢字", pyparsing_unicode.Japanese.Kanji)
+    setattr(pyparsing_unicode.Japanese, u"カタカナ", pyparsing_unicode.Japanese.Katakana)
+    setattr(pyparsing_unicode.Japanese, u"ひらがな", pyparsing_unicode.Japanese.Hiragana)
+    setattr(pyparsing_unicode, u"한국어", pyparsing_unicode.Korean)
+    setattr(pyparsing_unicode, u"ไทย", pyparsing_unicode.Thai)
+    setattr(pyparsing_unicode, u"देवनागरी", pyparsing_unicode.Devanagari)
+
+
+class pyparsing_test:
+    """
+    namespace class for classes useful in writing unit tests
+    """
+
+    class reset_pyparsing_context:
+        """
+        Context manager to be used when writing unit tests that modify pyparsing config values:
+         - packrat parsing
+         - default whitespace characters.
+         - default keyword characters
+         - literal string auto-conversion class
+         - __diag__ settings
+
+        Example::
+
+            with reset_pyparsing_context():
+                # test that literals used to construct a grammar are automatically suppressed
+                ParserElement.inlineLiteralsUsing(Suppress)
+
+                term = Word(alphas) | Word(nums)
+                group = Group('(' + term[...] + ')')
+
+                # assert that the '()' characters are not included in the parsed tokens
+                self.assertParseAndCheckList(group, "(abc 123 def)", ['abc', '123', 'def'])
+
+            # after exiting context manager, literals are converted to Literal expressions again
+        """
+
+        def __init__(self):
+            self._save_context = {}
+
+        def save(self):
+            self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS
+            self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS
+            self._save_context[
+                "literal_string_class"
+            ] = ParserElement._literalStringClass
+            self._save_context["packrat_enabled"] = ParserElement._packratEnabled
+            self._save_context["packrat_parse"] = ParserElement._parse
+            self._save_context["__diag__"] = {
+                name: getattr(__diag__, name) for name in __diag__._all_names
+            }
+            self._save_context["__compat__"] = {
+                "collect_all_And_tokens": __compat__.collect_all_And_tokens
+            }
+            return self
+
+        def restore(self):
+            # reset pyparsing global state
+            if (
+                ParserElement.DEFAULT_WHITE_CHARS
+                != self._save_context["default_whitespace"]
+            ):
+                ParserElement.setDefaultWhitespaceChars(
+                    self._save_context["default_whitespace"]
+                )
+            Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"]
+            ParserElement.inlineLiteralsUsing(
+                self._save_context["literal_string_class"]
+            )
+            for name, value in self._save_context["__diag__"].items():
+                setattr(__diag__, name, value)
+            ParserElement._packratEnabled = self._save_context["packrat_enabled"]
+            ParserElement._parse = self._save_context["packrat_parse"]
+            __compat__.collect_all_And_tokens = self._save_context["__compat__"][
+                "collect_all_And_tokens"
+            ]
+
+        def __enter__(self):
+            return self.save()
+
+        def __exit__(self, *args):
+            return self.restore()
+
+    class TestParseResultsAsserts:
+        """
+        A mixin class to add parse results assertion methods to normal unittest.TestCase classes.
+        """
+        def assertParseResultsEquals(
+            self, result, expected_list=None, expected_dict=None, msg=None
+        ):
+            """
+            Unit test assertion to compare a ParseResults object with an optional expected_list,
+            and compare any defined results names with an optional expected_dict.
+            """
+            if expected_list is not None:
+                self.assertEqual(expected_list, result.asList(), msg=msg)
+            if expected_dict is not None:
+                self.assertEqual(expected_dict, result.asDict(), msg=msg)
+
+        def assertParseAndCheckList(
+            self, expr, test_string, expected_list, msg=None, verbose=True
+        ):
+            """
+            Convenience wrapper assert to test a parser element and input string, and assert that
+            the resulting ParseResults.asList() is equal to the expected_list.
+            """
+            result = expr.parseString(test_string, parseAll=True)
+            if verbose:
+                print(result.dump())
+            self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg)
+
+        def assertParseAndCheckDict(
+            self, expr, test_string, expected_dict, msg=None, verbose=True
+        ):
+            """
+            Convenience wrapper assert to test a parser element and input string, and assert that
+            the resulting ParseResults.asDict() is equal to the expected_dict.
+            """
+            result = expr.parseString(test_string, parseAll=True)
+            if verbose:
+                print(result.dump())
+            self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg)
+
+        def assertRunTestResults(
+            self, run_tests_report, expected_parse_results=None, msg=None
+        ):
+            """
+            Unit test assertion to evaluate output of ParserElement.runTests(). If a list of
+            list-dict tuples is given as the expected_parse_results argument, then these are zipped
+            with the report tuples returned by runTests and evaluated using assertParseResultsEquals.
+            Finally, asserts that the overall runTests() success value is True.
+
+            :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests
+            :param expected_parse_results (optional): [tuple(str, list, dict, Exception)]
+            """
+            run_test_success, run_test_results = run_tests_report
+
+            if expected_parse_results is not None:
+                merged = [
+                    (rpt[0], rpt[1], expected)
+                    for rpt, expected in zip(run_test_results, expected_parse_results)
+                ]
+                for test_string, result, expected in merged:
+                    # expected should be a tuple containing a list and/or a dict or an exception,
+                    # and optional failure message string
+                    # an empty tuple will skip any result validation
+                    fail_msg = next(
+                        (exp for exp in expected if isinstance(exp, str)), None
+                    )
+                    expected_exception = next(
+                        (
+                            exp
+                            for exp in expected
+                            if isinstance(exp, type) and issubclass(exp, Exception)
+                        ),
+                        None,
+                    )
+                    if expected_exception is not None:
+                        with self.assertRaises(
+                            expected_exception=expected_exception, msg=fail_msg or msg
+                        ):
+                            if isinstance(result, Exception):
+                                raise result
+                    else:
+                        expected_list = next(
+                            (exp for exp in expected if isinstance(exp, list)), None
+                        )
+                        expected_dict = next(
+                            (exp for exp in expected if isinstance(exp, dict)), None
+                        )
+                        if (expected_list, expected_dict) != (None, None):
+                            self.assertParseResultsEquals(
+                                result,
+                                expected_list=expected_list,
+                                expected_dict=expected_dict,
+                                msg=fail_msg or msg,
+                            )
+                        else:
+                            # warning here maybe?
+                            print("no validation for {!r}".format(test_string))
+
+            # do this last, in case some specific test results can be reported instead
+            self.assertTrue(
+                run_test_success, msg=msg if msg is not None else "failed runTests"
+            )
+
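+        # Hedged usage sketch (comment only): each expected_parse_results entry is
+        # a tuple that may hold a list, a dict, an Exception type, and/or a failure
+        # message string, zipped against the runTests report in order:
+        #
+        #     report = simpleSQL.runTests("select * from SYS.XYZZY", printResults=False)
+        #     self.assertRunTestResults(
+        #         report,
+        #         expected_parse_results=[(["select", "*", "from", ["SYS.XYZZY"]],)],
+        #     )
+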
+        @contextmanager
+        def assertRaisesParseException(self, exc_type=ParseException, msg=None):
+            with self.assertRaises(exc_type, msg=msg):
+                yield
+
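+    # Hedged usage sketch (comment only, not upstream pyparsing): the mixin is
+    # combined with unittest.TestCase, e.g.:
+    #
+    #     class MyParserTests(pyparsing_test.TestParseResultsAsserts, unittest.TestCase):
+    #         def test_select(self):
+    #             self.assertParseAndCheckList(
+    #                 simpleSQL,
+    #                 "select * from SYS.XYZZY",
+    #                 ["select", "*", "from", ["SYS.XYZZY"]],
+    #             )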
+
+if __name__ == "__main__":
+
+    selectToken    = CaselessLiteral("select")
+    fromToken      = CaselessLiteral("from")
+
+    ident          = Word(alphas, alphanums + "_$")
+
+    columnName     = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
+    columnNameList = Group(delimitedList(columnName)).setName("columns")
+    columnSpec     = ('*' | columnNameList)
+
+    tableName      = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
+    tableNameList  = Group(delimitedList(tableName)).setName("tables")
+
+    simpleSQL      = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables")
+
+    # demo runTests method, including embedded comments in test string
+    simpleSQL.runTests("""
+        # '*' as column list and dotted table name
+        select * from SYS.XYZZY
+
+        # caseless match on "SELECT", and casts back to "select"
+        SELECT * from XYZZY, ABC
+
+        # list of column names, and mixed case SELECT keyword
+        Select AA,BB,CC from Sys.dual
+
+        # multiple tables
+        Select A, B, C from Sys.dual, Table2
+
+        # invalid SELECT keyword - should fail
+        Xelect A, B, C from Sys.dual
+
+        # incomplete command - should fail
+        Select
+
+        # invalid column name - should fail
+        Select ^^^ frox Sys.dual
+
+        """)
+
+    pyparsing_common.number.runTests("""
+        100
+        -100
+        +100
+        3.14159
+        6.02e23
+        1e-12
+        """)
+
+    # any int or real number, returned as float
+    pyparsing_common.fnumber.runTests("""
+        100
+        -100
+        +100
+        3.14159
+        6.02e23
+        1e-12
+        """)
+
+    pyparsing_common.hex_integer.runTests("""
+        100
+        FF
+        """)
+
+    import uuid
+    pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
+    pyparsing_common.uuid.runTests("""
+        12345678-1234-5678-1234-567812345678
+        """)
diff --git a/venv/lib/python3.7/site-packages/six.py b/venv/lib/python3.7/site-packages/six.py
new file mode 100644
index 00000000..83f69783
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/six.py
@@ -0,0 +1,982 @@
+# Copyright (c) 2010-2020 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.15.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+    string_types = str,
+    integer_types = int,
+    class_types = type,
+    text_type = str
+    binary_type = bytes
+
+    MAXSIZE = sys.maxsize
+else:
+    string_types = basestring,
+    integer_types = (int, long)
+    class_types = (type, types.ClassType)
+    text_type = unicode
+    binary_type = str
+
+    if sys.platform.startswith("java"):
+        # Jython always uses 32 bits.
+        MAXSIZE = int((1 << 31) - 1)
+    else:
+        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+        class X(object):
+
+            def __len__(self):
+                return 1 << 31
+        try:
+            len(X())
+        except OverflowError:
+            # 32-bit
+            MAXSIZE = int((1 << 31) - 1)
+        else:
+            # 64-bit
+            MAXSIZE = int((1 << 63) - 1)
+        del X
+
+
+def _add_doc(func, doc):
+    """Add documentation to a function."""
+    func.__doc__ = doc
+
+
+def _import_module(name):
+    """Import module, returning the module after the last dot."""
+    __import__(name)
+    return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+    def __init__(self, name):
+        self.name = name
+
+    def __get__(self, obj, tp):
+        result = self._resolve()
+        setattr(obj, self.name, result)  # cache on the instance, shadowing this non-data descriptor
+        try:
+            # This is a bit ugly, but it avoids running this again by
+            # removing this descriptor.
+            delattr(obj.__class__, self.name)
+        except AttributeError:
+            pass
+        return result
+
+
+class MovedModule(_LazyDescr):
+
+    def __init__(self, name, old, new=None):
+        super(MovedModule, self).__init__(name)
+        if PY3:
+            if new is None:
+                new = name
+            self.mod = new
+        else:
+            self.mod = old
+
+    def _resolve(self):
+        return _import_module(self.mod)
+
+    def __getattr__(self, attr):
+        _module = self._resolve()
+        value = getattr(_module, attr)
+        setattr(self, attr, value)
+        return value
+
+
+class _LazyModule(types.ModuleType):
+
+    def __init__(self, name):
+        super(_LazyModule, self).__init__(name)
+        self.__doc__ = self.__class__.__doc__
+
+    def __dir__(self):
+        attrs = ["__doc__", "__name__"]
+        attrs += [attr.name for attr in self._moved_attributes]
+        return attrs
+
+    # Subclasses should override this
+    _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+        super(MovedAttribute, self).__init__(name)
+        if PY3:
+            if new_mod is None:
+                new_mod = name
+            self.mod = new_mod
+            if new_attr is None:
+                if old_attr is None:
+                    new_attr = name
+                else:
+                    new_attr = old_attr
+            self.attr = new_attr
+        else:
+            self.mod = old_mod
+            if old_attr is None:
+                old_attr = name
+            self.attr = old_attr
+
+    def _resolve(self):
+        module = _import_module(self.mod)
+        return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+    """
+    A meta path importer to import six.moves and its submodules.
+
+    This class implements a PEP302 finder and loader. It should be compatible
+    with Python 2.5 and all existing versions of Python 3.
+    """
+
+    def __init__(self, six_module_name):
+        self.name = six_module_name
+        self.known_modules = {}
+
+    def _add_module(self, mod, *fullnames):
+        for fullname in fullnames:
+            self.known_modules[self.name + "." + fullname] = mod
+
+    def _get_module(self, fullname):
+        return self.known_modules[self.name + "." + fullname]
+
+    def find_module(self, fullname, path=None):
+        if fullname in self.known_modules:
+            return self
+        return None
+
+    def __get_module(self, fullname):
+        try:
+            return self.known_modules[fullname]
+        except KeyError:
+            raise ImportError("This loader does not know module " + fullname)
+
+    def load_module(self, fullname):
+        try:
+            # in case of a reload
+            return sys.modules[fullname]
+        except KeyError:
+            pass
+        mod = self.__get_module(fullname)
+        if isinstance(mod, MovedModule):
+            mod = mod._resolve()
+        else:
+            mod.__loader__ = self
+        sys.modules[fullname] = mod
+        return mod
+
+    def is_package(self, fullname):
+        """
+        Return true if the named module is a package.
+
+        We need this method to get correct spec objects with
+        Python 3.4 (see PEP451)
+        """
+        return hasattr(self.__get_module(fullname), "__path__")
+
+    def get_code(self, fullname):
+        """Return None
+
+        Required, if is_package is implemented"""
+        self.__get_module(fullname)  # eventually raises ImportError
+        return None
+    get_source = get_code  # same as get_code
+
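+# Hedged illustration (comment only, not upstream six): once the importer below
+# is registered on sys.meta_path (done at the bottom of this module), renamed
+# stdlib modules import under a single spelling on both major versions:
+#
+#     from six.moves import configparser          # ConfigParser on Py2
+#     import six.moves.urllib.parse as urlparse   # urlparse on Py2
+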
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+    """Lazy loading of moved objects"""
+    __path__ = []  # mark as package
+
+
+_moved_attributes = [
+    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+    MovedAttribute("intern", "__builtin__", "sys"),
+    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+    MovedAttribute("getoutput", "commands", "subprocess"),
+    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+    MovedAttribute("reduce", "__builtin__", "functools"),
+    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+    MovedAttribute("StringIO", "StringIO", "io"),
+    MovedAttribute("UserDict", "UserDict", "collections"),
+    MovedAttribute("UserList", "UserList", "collections"),
+    MovedAttribute("UserString", "UserString", "collections"),
+    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+    MovedModule("builtins", "__builtin__"),
+    MovedModule("configparser", "ConfigParser"),
+    MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
+    MovedModule("copyreg", "copy_reg"),
+    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+    MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"),
+    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+    MovedModule("http_cookies", "Cookie", "http.cookies"),
+    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+    MovedModule("html_parser", "HTMLParser", "html.parser"),
+    MovedModule("http_client", "httplib", "http.client"),
+    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+    MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+    MovedModule("cPickle", "cPickle", "pickle"),
+    MovedModule("queue", "Queue"),
+    MovedModule("reprlib", "repr"),
+    MovedModule("socketserver", "SocketServer"),
+    MovedModule("_thread", "thread", "_thread"),
+    MovedModule("tkinter", "Tkinter"),
+    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+    MovedModule("tkinter_colorchooser", "tkColorChooser",
+                "tkinter.colorchooser"),
+    MovedModule("tkinter_commondialog", "tkCommonDialog",
+                "tkinter.commondialog"),
+    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+                "tkinter.simpledialog"),
+    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+    _moved_attributes += [
+        MovedModule("winreg", "_winreg"),
+    ]
+
+for attr in _moved_attributes:
+    setattr(_MovedItems, attr.name, attr)
+    if isinstance(attr, MovedModule):
+        _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
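+# Hedged usage note (comment only): attribute access on six.moves lazily
+# resolves the MovedAttribute/MovedModule entries above, e.g.:
+#
+#     from six.moves import range, zip    # xrange/izip on Py2, builtins on Py3
+#     six.moves.queue.Queue()             # Queue.Queue on Py2, queue.Queue on Py3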
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("quote", "urllib", "urllib.parse"),
+    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("unquote", "urllib", "urllib.parse"),
+    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+    MovedAttribute("urlencode", "urllib", "urllib.parse"),
+    MovedAttribute("splitquery", "urllib", "urllib.parse"),
+    MovedAttribute("splittag", "urllib", "urllib.parse"),
+    MovedAttribute("splituser", "urllib", "urllib.parse"),
+    MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+    setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+                      "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+    MovedAttribute("URLError", "urllib2", "urllib.error"),
+    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+    setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+                      "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+    MovedAttribute("urlopen", "urllib2", "urllib.request"),
+    MovedAttribute("install_opener", "urllib2", "urllib.request"),
+    MovedAttribute("build_opener", "urllib2", "urllib.request"),
+    MovedAttribute("pathname2url", "urllib", "urllib.request"),
+    MovedAttribute("url2pathname", "urllib", "urllib.request"),
+    MovedAttribute("getproxies", "urllib", "urllib.request"),
+    MovedAttribute("Request", "urllib2", "urllib.request"),
+    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+    MovedAttribute("URLopener", "urllib", "urllib.request"),
+    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+    MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+    MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+    setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+                      "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+    MovedAttribute("addbase", "urllib", "urllib.response"),
+    MovedAttribute("addclosehook", "urllib", "urllib.response"),
+    MovedAttribute("addinfo", "urllib", "urllib.response"),
+    MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+    setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+                      "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+                      "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+    __path__ = []  # mark as package
+    parse = _importer._get_module("moves.urllib_parse")
+    error = _importer._get_module("moves.urllib_error")
+    request = _importer._get_module("moves.urllib_request")
+    response = _importer._get_module("moves.urllib_response")
+    robotparser = _importer._get_module("moves.urllib_robotparser")
+
+    def __dir__(self):
+        return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+                      "moves.urllib")
+
+
+def add_move(move):
+    """Add an item to six.moves."""
+    setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+    """Remove item from six.moves."""
+    try:
+        delattr(_MovedItems, name)
+    except AttributeError:
+        try:
+            del moves.__dict__[name]
+        except KeyError:
+            raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+    _meth_func = "__func__"
+    _meth_self = "__self__"
+
+    _func_closure = "__closure__"
+    _func_code = "__code__"
+    _func_defaults = "__defaults__"
+    _func_globals = "__globals__"
+else:
+    _meth_func = "im_func"
+    _meth_self = "im_self"
+
+    _func_closure = "func_closure"
+    _func_code = "func_code"
+    _func_defaults = "func_defaults"
+    _func_globals = "func_globals"
+
+
+try:
+    advance_iterator = next
+except NameError:
+    def advance_iterator(it):
+        return it.next()
+next = advance_iterator
+
+
+try:
+    callable = callable
+except NameError:
+    def callable(obj):
+        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+    def get_unbound_function(unbound):
+        return unbound
+
+    create_bound_method = types.MethodType
+
+    def create_unbound_method(func, cls):
+        return func
+
+    Iterator = object
+else:
+    def get_unbound_function(unbound):
+        return unbound.im_func
+
+    def create_bound_method(func, obj):
+        return types.MethodType(func, obj, obj.__class__)
+
+    def create_unbound_method(func, cls):
+        return types.MethodType(func, None, cls)
+
+    class Iterator(object):
+
+        def next(self):
+            return type(self).__next__(self)
+
+    callable = callable
+_add_doc(get_unbound_function,
+         """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+    def iterkeys(d, **kw):
+        return iter(d.keys(**kw))
+
+    def itervalues(d, **kw):
+        return iter(d.values(**kw))
+
+    def iteritems(d, **kw):
+        return iter(d.items(**kw))
+
+    def iterlists(d, **kw):
+        return iter(d.lists(**kw))
+
+    viewkeys = operator.methodcaller("keys")
+
+    viewvalues = operator.methodcaller("values")
+
+    viewitems = operator.methodcaller("items")
+else:
+    def iterkeys(d, **kw):
+        return d.iterkeys(**kw)
+
+    def itervalues(d, **kw):
+        return d.itervalues(**kw)
+
+    def iteritems(d, **kw):
+        return d.iteritems(**kw)
+
+    def iterlists(d, **kw):
+        return d.iterlists(**kw)
+
+    viewkeys = operator.methodcaller("viewkeys")
+
+    viewvalues = operator.methodcaller("viewvalues")
+
+    viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+         "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+         "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
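+# Hedged usage sketch (comment only): these helpers give one spelling for dict
+# iteration on both major versions:
+#
+#     for key, value in six.iteritems({"a": 1}):
+#         print(key, value)   # d.iteritems() on Py2, iter(d.items()) on Py3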
+
+if PY3:
+    def b(s):
+        return s.encode("latin-1")
+
+    def u(s):
+        return s
+    unichr = chr
+    import struct
+    int2byte = struct.Struct(">B").pack
+    del struct
+    byte2int = operator.itemgetter(0)
+    indexbytes = operator.getitem
+    iterbytes = iter
+    import io
+    StringIO = io.StringIO
+    BytesIO = io.BytesIO
+    del io
+    _assertCountEqual = "assertCountEqual"
+    if sys.version_info[1] <= 1:
+        _assertRaisesRegex = "assertRaisesRegexp"
+        _assertRegex = "assertRegexpMatches"
+        _assertNotRegex = "assertNotRegexpMatches"
+    else:
+        _assertRaisesRegex = "assertRaisesRegex"
+        _assertRegex = "assertRegex"
+        _assertNotRegex = "assertNotRegex"
+else:
+    def b(s):
+        return s
+    # Workaround for standalone backslash
+
+    def u(s):
+        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+    unichr = unichr
+    int2byte = chr
+
+    def byte2int(bs):
+        return ord(bs[0])
+
+    def indexbytes(buf, i):
+        return ord(buf[i])
+    iterbytes = functools.partial(itertools.imap, ord)
+    import StringIO
+    StringIO = BytesIO = StringIO.StringIO
+    _assertCountEqual = "assertItemsEqual"
+    _assertRaisesRegex = "assertRaisesRegexp"
+    _assertRegex = "assertRegexpMatches"
+    _assertNotRegex = "assertNotRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+    return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+    return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+    return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+def assertNotRegex(self, *args, **kwargs):
+    return getattr(self, _assertNotRegex)(*args, **kwargs)
+
+
+if PY3:
+    exec_ = getattr(moves.builtins, "exec")
+
+    def reraise(tp, value, tb=None):
+        try:
+            if value is None:
+                value = tp()
+            if value.__traceback__ is not tb:
+                raise value.with_traceback(tb)
+            raise value
+        finally:
+            value = None
+            tb = None
+
+else:
+    def exec_(_code_, _globs_=None, _locs_=None):
+        """Execute code in a namespace."""
+        if _globs_ is None:
+            frame = sys._getframe(1)
+            _globs_ = frame.f_globals
+            if _locs_ is None:
+                _locs_ = frame.f_locals
+            del frame
+        elif _locs_ is None:
+            _locs_ = _globs_
+        exec("""exec _code_ in _globs_, _locs_""")
+
+    exec_("""def reraise(tp, value, tb=None):
+    try:
+        raise tp, value, tb
+    finally:
+        tb = None
+""")
+
+
+if sys.version_info[:2] > (3,):
+    exec_("""def raise_from(value, from_value):
+    try:
+        raise value from from_value
+    finally:
+        value = None
+""")
+else:
+    def raise_from(value, from_value):
+        raise value
+
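+# Hedged usage sketch (comment only): reraise and raise_from give a
+# version-neutral spelling for re-raising and exception chaining:
+#
+#     try:
+#         parse(data)                              # `parse` is hypothetical
+#     except ValueError:
+#         six.reraise(*sys.exc_info())             # keeps the original traceback
+#     # or: six.raise_from(RuntimeError("bad input"), original_exc)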
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+    def print_(*args, **kwargs):
+        """The new-style print function for Python 2.4 and 2.5."""
+        fp = kwargs.pop("file", sys.stdout)
+        if fp is None:
+            return
+
+        def write(data):
+            if not isinstance(data, basestring):
+                data = str(data)
+            # If the file has an encoding, encode unicode with it.
+            if (isinstance(fp, file) and
+                    isinstance(data, unicode) and
+                    fp.encoding is not None):
+                errors = getattr(fp, "errors", None)
+                if errors is None:
+                    errors = "strict"
+                data = data.encode(fp.encoding, errors)
+            fp.write(data)
+        want_unicode = False
+        sep = kwargs.pop("sep", None)
+        if sep is not None:
+            if isinstance(sep, unicode):
+                want_unicode = True
+            elif not isinstance(sep, str):
+                raise TypeError("sep must be None or a string")
+        end = kwargs.pop("end", None)
+        if end is not None:
+            if isinstance(end, unicode):
+                want_unicode = True
+            elif not isinstance(end, str):
+                raise TypeError("end must be None or a string")
+        if kwargs:
+            raise TypeError("invalid keyword arguments to print()")
+        if not want_unicode:
+            for arg in args:
+                if isinstance(arg, unicode):
+                    want_unicode = True
+                    break
+        if want_unicode:
+            newline = unicode("\n")
+            space = unicode(" ")
+        else:
+            newline = "\n"
+            space = " "
+        if sep is None:
+            sep = space
+        if end is None:
+            end = newline
+        for i, arg in enumerate(args):
+            if i:
+                write(sep)
+            write(arg)
+        write(end)
+if sys.version_info[:2] < (3, 3):
+    _print = print_
+
+    def print_(*args, **kwargs):
+        fp = kwargs.get("file", sys.stdout)
+        flush = kwargs.pop("flush", False)
+        _print(*args, **kwargs)
+        if flush and fp is not None:
+            fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+    # This does exactly what the :func:`py3:functools.update_wrapper`
+    # function does on Python versions after 3.2. It sets the ``__wrapped__``
+    # attribute on the ``wrapper`` object and it doesn't raise an error if any of
+    # the attributes mentioned in ``assigned`` and ``updated`` are missing on
+    # the ``wrapped`` object.
+    def _update_wrapper(wrapper, wrapped,
+                        assigned=functools.WRAPPER_ASSIGNMENTS,
+                        updated=functools.WRAPPER_UPDATES):
+        for attr in assigned:
+            try:
+                value = getattr(wrapped, attr)
+            except AttributeError:
+                continue
+            else:
+                setattr(wrapper, attr, value)
+        for attr in updated:
+            getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+        wrapper.__wrapped__ = wrapped
+        return wrapper
+    _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+
+    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+              updated=functools.WRAPPER_UPDATES):
+        return functools.partial(_update_wrapper, wrapped=wrapped,
+                                 assigned=assigned, updated=updated)
+    wraps.__doc__ = functools.wraps.__doc__
+
+else:
+    wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+    """Create a base class with a metaclass."""
+    # This requires a bit of explanation: the basic idea is to make a dummy
+    # metaclass for one level of class instantiation that replaces itself with
+    # the actual metaclass.
+    class metaclass(type):
+
+        def __new__(cls, name, this_bases, d):
+            if sys.version_info[:2] >= (3, 7):
+                # This version introduced PEP 560 that requires a bit
+                # of extra care (we mimic what is done by __build_class__).
+                resolved_bases = types.resolve_bases(bases)
+                if resolved_bases is not bases:
+                    d['__orig_bases__'] = bases
+            else:
+                resolved_bases = bases
+            return meta(name, resolved_bases, d)
+
+        @classmethod
+        def __prepare__(cls, name, this_bases):
+            return meta.__prepare__(name, bases)
+    return type.__new__(metaclass, 'temporary_class', (), {})
+
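+# Hedged usage sketch (comment only): with_metaclass sidesteps the incompatible
+# Py2 `__metaclass__` attribute and Py3 `metaclass=` keyword syntaxes:
+#
+#     class Meta(type):
+#         pass
+#
+#     class Base(six.with_metaclass(Meta, object)):   # works on both versions
+#         pass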
+
+def add_metaclass(metaclass):
+    """Class decorator for creating a class with a metaclass."""
+    def wrapper(cls):
+        orig_vars = cls.__dict__.copy()
+        slots = orig_vars.get('__slots__')
+        if slots is not None:
+            if isinstance(slots, str):
+                slots = [slots]
+            for slots_var in slots:
+                orig_vars.pop(slots_var)
+        orig_vars.pop('__dict__', None)
+        orig_vars.pop('__weakref__', None)
+        if hasattr(cls, '__qualname__'):
+            orig_vars['__qualname__'] = cls.__qualname__
+        return metaclass(cls.__name__, cls.__bases__, orig_vars)
+    return wrapper
+
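+# Hedged usage sketch (comment only): the decorator form rebuilds the class
+# under the given metaclass, with the __slots__ handling shown above:
+#
+#     @six.add_metaclass(Meta)
+#     class Widget(object):
+#         pass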
+
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+    """Coerce **s** to six.binary_type.
+
+    For Python 2:
+      - `unicode` -> encoded to `str`
+      - `str` -> `str`
+
+    For Python 3:
+      - `str` -> encoded to `bytes`
+      - `bytes` -> `bytes`
+    """
+    if isinstance(s, binary_type):
+        return s
+    if isinstance(s, text_type):
+        return s.encode(encoding, errors)
+    raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+    """Coerce *s* to `str`.
+
+    For Python 2:
+      - `unicode` -> encoded to `str`
+      - `str` -> `str`
+
+    For Python 3:
+      - `str` -> `str`
+      - `bytes` -> decoded to `str`
+    """
+    # Optimization: Fast return for the common case.
+    if type(s) is str:
+        return s
+    if PY2 and isinstance(s, text_type):
+        return s.encode(encoding, errors)
+    elif PY3 and isinstance(s, binary_type):
+        return s.decode(encoding, errors)
+    elif not isinstance(s, (text_type, binary_type)):
+        raise TypeError("not expecting type '%s'" % type(s))
+    return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+    """Coerce *s* to six.text_type.
+
+    For Python 2:
+      - `unicode` -> `unicode`
+      - `str` -> `unicode`
+
+    For Python 3:
+      - `str` -> `str`
+      - `bytes` -> decoded to `str`
+    """
+    if isinstance(s, binary_type):
+        return s.decode(encoding, errors)
+    elif isinstance(s, text_type):
+        return s
+    else:
+        raise TypeError("not expecting type '%s'" % type(s))
+
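+# Hedged usage sketch (comment only): the ensure_* trio normalizes str/bytes at
+# API boundaries:
+#
+#     six.ensure_binary(u"caf\xe9")      # -> b'caf\xc3\xa9' (UTF-8 encoded)
+#     six.ensure_text(b"caf\xc3\xa9")    # -> u"caf\xe9"
+#     six.ensure_str(value)              # `value`: any hypothetical str/bytes object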
+
+def python_2_unicode_compatible(klass):
+    """
+    A class decorator that defines __unicode__ and __str__ methods under Python 2.
+    Under Python 3 it does nothing.
+
+    To support Python 2 and 3 with a single code base, define a __str__ method
+    returning text and apply this decorator to the class.
+    """
+    if PY2:
+        if '__str__' not in klass.__dict__:
+            raise ValueError("@python_2_unicode_compatible cannot be applied "
+                             "to %s because it doesn't define __str__()." %
+                             klass.__name__)
+        klass.__unicode__ = klass.__str__
+        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+    return klass
+
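+# Hedged usage sketch (comment only): define a text-returning __str__ once and
+# let the decorator supply the Py2 __unicode__/encoded-__str__ pair:
+#
+#     @six.python_2_unicode_compatible
+#     class Person(object):
+#         def __str__(self):
+#             return u"name: ..."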
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = []  # required for PEP 302 and PEP 451
+__package__ = __name__  # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+    for i, importer in enumerate(sys.meta_path):
+        # Here's some real nastiness: Another "instance" of the six module might
+        # be floating around. Therefore, we can't use isinstance() to check for
+        # the six meta path importer, since the other six instance will have
+        # inserted an importer with a different class.
+        if (type(importer).__name__ == "_SixMetaPathImporter" and
+                importer.name == __name__):
+            del sys.meta_path[i]
+            break
+    del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/METADATA b/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/METADATA
new file mode 100644
index 00000000..d595d874
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/METADATA
@@ -0,0 +1,30 @@
+Metadata-Version: 2.1
+Name: tensorflow-estimator
+Version: 2.5.0
+Summary: TensorFlow Estimator.
+Home-page: https://www.tensorflow.org/
+Author: Google Inc.
+License: Apache 2.0
+Download-URL: https://github.com/tensorflow/estimator/tags
+Keywords: tensorflow estimator tensor machine learning
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Scientific/Engineering
+Classifier: Topic :: Scientific/Engineering :: Mathematics
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Classifier: Topic :: Software Development
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+
+TensorFlow Estimator is a high-level API that encapsulates model training,
+evaluation, prediction, and exporting.
+
+
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/RECORD b/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/RECORD
new file mode 100644
index 00000000..99aff514
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/RECORD
@@ -0,0 +1,239 @@
+tensorflow_estimator-2.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+tensorflow_estimator-2.5.0.dist-info/METADATA,sha256=08aVdR_o3YDcKNMrxe_fKvUKovd7JnytF9hEd9UqIDo,1221
+tensorflow_estimator-2.5.0.dist-info/RECORD,,
+tensorflow_estimator-2.5.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
+tensorflow_estimator-2.5.0.dist-info/top_level.txt,sha256=I5a_-JjZRx_kmmdAqOEr7B9_4ynRkb0cLUdgoYHDeJY,21
+tensorflow_estimator/__init__.py,sha256=oLQyFA75sgEveKybWKONZpVzc5H-5hCH_4FP9IaBRX8,617
+tensorflow_estimator/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/_api/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/_api/v1/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v1/__pycache__/v1.cpython-37.pyc,,
+tensorflow_estimator/_api/v1/estimator/__init__.py,sha256=95azySea8XDFxjHiNK0OYUEitXt-CHV58Q4qgegQTcw,5661
+tensorflow_estimator/_api/v1/estimator/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v1/estimator/experimental/__init__.py,sha256=SkdgNSNeUlSuxwHbMroAGGPs3pHiHkMSuADAl32ffro,1741
+tensorflow_estimator/_api/v1/estimator/experimental/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v1/estimator/export/__init__.py,sha256=2N7CUBb3PQESlb6YmF_ZTfCXy3yN8T7Cn9zH7FEuLAk,1497
+tensorflow_estimator/_api/v1/estimator/export/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v1/estimator/inputs/__init__.py,sha256=61p27FFjfuHVtG6-knUZJzRZLwjPRDnbynb3X7SWMgM,760
+tensorflow_estimator/_api/v1/estimator/inputs/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v1/estimator/tpu/__init__.py,sha256=uV1eRt-uhu7vaROZOZ4_X1nh_eNTnYN1d0lEvzaxH5w,1061
+tensorflow_estimator/_api/v1/estimator/tpu/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v1/estimator/tpu/experimental/__init__.py,sha256=10YZhbnajOg5Ue_yijBdCXG8DseEtvCXhu_RKXF4f2o,717
+tensorflow_estimator/_api/v1/estimator/tpu/experimental/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v1/v1.py,sha256=oLQyFA75sgEveKybWKONZpVzc5H-5hCH_4FP9IaBRX8,617
+tensorflow_estimator/_api/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/_api/v2/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v2/__pycache__/v2.cpython-37.pyc,,
+tensorflow_estimator/_api/v2/estimator/__init__.py,sha256=JT65WsWWigo2DfQhQtBifCwOHuDoUZkyPOF6FT0Z80M,5612
+tensorflow_estimator/_api/v2/estimator/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v2/estimator/experimental/__init__.py,sha256=XfZW1qF3g0S1hCwqeRQb1DZk5IpmjyFXOZsUCeHNNi8,1308
+tensorflow_estimator/_api/v2/estimator/experimental/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v2/estimator/export/__init__.py,sha256=-jM6MYRW1ZG0v05Ndz4jmZf2j-ne8q6ZllaUjQFG_iQ,1182
+tensorflow_estimator/_api/v2/estimator/export/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v2/estimator/inputs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/_api/v2/estimator/inputs/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/_api/v2/v2.py,sha256=j3g9V-mGJ9a9j6K5BRkY7WmJpnHimp5hN8qpm193NHQ,318
+tensorflow_estimator/python/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/early_stopping.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/estimator.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/estimator_lib.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/exporter.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/extenders.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/gc.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/keras.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/mode_keys.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/model_fn.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/run_config.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/training.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/__pycache__/util.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/__init__.py,sha256=bAAZ_bMUxV_t75E_GHwPMf05oErsjz1p6WUaasJapaY,634
+tensorflow_estimator/python/estimator/api/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/api/_v1/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v1/__pycache__/v1.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v1/estimator/__init__.py,sha256=UxBY2K-dydt35Qfcfkoztg4zvpUk1sMXcqgs4KlTPC8,5729
+tensorflow_estimator/python/estimator/api/_v1/estimator/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v1/estimator/experimental/__init__.py,sha256=SkdgNSNeUlSuxwHbMroAGGPs3pHiHkMSuADAl32ffro,1741
+tensorflow_estimator/python/estimator/api/_v1/estimator/experimental/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v1/estimator/export/__init__.py,sha256=2N7CUBb3PQESlb6YmF_ZTfCXy3yN8T7Cn9zH7FEuLAk,1497
+tensorflow_estimator/python/estimator/api/_v1/estimator/export/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v1/estimator/inputs/__init__.py,sha256=61p27FFjfuHVtG6-knUZJzRZLwjPRDnbynb3X7SWMgM,760
+tensorflow_estimator/python/estimator/api/_v1/estimator/inputs/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v1/estimator/tpu/__init__.py,sha256=-mlbPMAKZqc31UBXvRwM4qZanpRyELemMZZgroMxu-E,1078
+tensorflow_estimator/python/estimator/api/_v1/estimator/tpu/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v1/estimator/tpu/experimental/__init__.py,sha256=10YZhbnajOg5Ue_yijBdCXG8DseEtvCXhu_RKXF4f2o,717
+tensorflow_estimator/python/estimator/api/_v1/estimator/tpu/experimental/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v1/v1.py,sha256=bAAZ_bMUxV_t75E_GHwPMf05oErsjz1p6WUaasJapaY,634
+tensorflow_estimator/python/estimator/api/_v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/api/_v2/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v2/__pycache__/v2.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v2/estimator/__init__.py,sha256=ZLsBXjPjA4MmJnywbYupdNhX4IotPvheCC82CklYEj0,5646
+tensorflow_estimator/python/estimator/api/_v2/estimator/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v2/estimator/experimental/__init__.py,sha256=XfZW1qF3g0S1hCwqeRQb1DZk5IpmjyFXOZsUCeHNNi8,1308
+tensorflow_estimator/python/estimator/api/_v2/estimator/experimental/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v2/estimator/export/__init__.py,sha256=-jM6MYRW1ZG0v05Ndz4jmZf2j-ne8q6ZllaUjQFG_iQ,1182
+tensorflow_estimator/python/estimator/api/_v2/estimator/export/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v2/estimator/inputs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/api/_v2/estimator/inputs/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/api/_v2/v2.py,sha256=c3IKGrVWyPiQjwKD5yRiF0ZYKKN02JUX6QubhWHJShY,335
+tensorflow_estimator/python/estimator/canned/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/canned/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/baseline.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/boosted_trees.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/boosted_trees_utils.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/dnn.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/dnn_linear_combined.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/dnn_testing_utils.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/head.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/kmeans.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/linear.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/linear_testing_utils.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/metric_keys.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/optimizers.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/parsing_utils.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/prediction_keys.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/rnn.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/__pycache__/saved_model_estimator.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/baseline.py,sha256=oFx2oMLNSBVi7MRF7Xz8wbgHH5p6ME6pgjE-cWDw4_g,23452
+tensorflow_estimator/python/estimator/canned/boosted_trees.py,sha256=yq-n80I-lr2qpJ7Or7d4nbZ0L9v4nMyIhzu_M2XEP6U,101185
+tensorflow_estimator/python/estimator/canned/boosted_trees_utils.py,sha256=gTnNodBYw0Mtfy6qCQtBVLfgJbsBtbAT_U0OsSxi1XU,3635
+tensorflow_estimator/python/estimator/canned/dnn.py,sha256=jVPzJxQ4oOoHGQWnlwdI9oWU3wHJLM7xzR0MfgUPvb0,48603
+tensorflow_estimator/python/estimator/canned/dnn_linear_combined.py,sha256=BLG9F2-C1QlTKAuZutd9EobcZEeDLZYSc31r0kVzmOM,48240
+tensorflow_estimator/python/estimator/canned/dnn_testing_utils.py,sha256=uxE9BkYl2joeFDfreLtPJZ6nnv3OfUrsmj6wwPcVOpo,82167
+tensorflow_estimator/python/estimator/canned/head.py,sha256=vt2Jd3-FlIjBCUWekx__3Xij5QqosASjlHs5ZLAiQpM,72640
+tensorflow_estimator/python/estimator/canned/kmeans.py,sha256=TUS87lwUFXLfKOq_MAWrI11aYJ9vFaylt7zjR-gNyyc,20373
+tensorflow_estimator/python/estimator/canned/linear.py,sha256=cs3NprIgZpws3x5pzWnkewDuS_g0ZHkQjN0YUJfFfFs,68061
+tensorflow_estimator/python/estimator/canned/linear_optimizer/__init__.py,sha256=MLbcdSxHXsd5ySoHzyIcLAakGCMK0AfrrN336JEKnUw,994
+tensorflow_estimator/python/estimator/canned/linear_optimizer/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/linear_optimizer/python/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/canned/linear_optimizer/python/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/linear_optimizer/python/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/canned/linear_optimizer/python/utils/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/linear_optimizer/python/utils/__pycache__/sdca_ops.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/linear_optimizer/python/utils/__pycache__/sharded_mutable_dense_hashtable.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/linear_optimizer/python/utils/sdca_ops.py,sha256=ov2t9kNloA6Z82xZ3W_qkZ_8-ZXa1kKUHkP_QLFrGLo,32208
+tensorflow_estimator/python/estimator/canned/linear_optimizer/python/utils/sharded_mutable_dense_hashtable.py,sha256=X4ClW_-XpIGN1EKaYdh_jQUTNJGNR4Tc88XWMesgZNA,14149
+tensorflow_estimator/python/estimator/canned/linear_testing_utils.py,sha256=x4oSIzluY-FQKkjFlDj-8xdeLDg0OOtEujfOqd95ELU,84286
+tensorflow_estimator/python/estimator/canned/metric_keys.py,sha256=6PP6vlsIpYTtBS4Z5HNWmiKBKPukLPZrPNoxmLIrNiA,2364
+tensorflow_estimator/python/estimator/canned/optimizers.py,sha256=xicDgChVzos9jAG_E-eoXUpfeBc0XDC1IjdRl_V5YSw,5680
+tensorflow_estimator/python/estimator/canned/parsing_utils.py,sha256=PLX5K8V_qI2mQNHoHw5i5Frk5ULxQ8-xsXKzSSmTo1s,15288
+tensorflow_estimator/python/estimator/canned/prediction_keys.py,sha256=zjnNIOLJkzGKNXAUVYKLzkPv3Zm8mQXy7BrSiPQGApg,1275
+tensorflow_estimator/python/estimator/canned/rnn.py,sha256=4lRw8-Y2iCfLjCQjoSPjgQDow9Y5-eLYAUnBLqbcCPM,28854
+tensorflow_estimator/python/estimator/canned/saved_model_estimator.py,sha256=Giyh9uinq93Ycvnxnb-oGfugC9B21d4xGrE5_68wTSM,20330
+tensorflow_estimator/python/estimator/canned/timeseries/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/canned/timeseries/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/timeseries/__pycache__/ar_model.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/timeseries/__pycache__/estimators.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/timeseries/__pycache__/feature_keys.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/timeseries/__pycache__/head.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/timeseries/__pycache__/math_utils.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/timeseries/__pycache__/model.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/timeseries/__pycache__/model_utils.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/timeseries/__pycache__/saved_model_utils.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/timeseries/__pycache__/state_management.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/timeseries/ar_model.py,sha256=V1GMWyE46arhalALNDUM58jd8ICm5-VN0q8cl4hwFl0,38457
+tensorflow_estimator/python/estimator/canned/timeseries/estimators.py,sha256=Hq0JC8B_hPDGvIm6cjg7e302pX27wYbovDtgcmgK9xA,20770
+tensorflow_estimator/python/estimator/canned/timeseries/feature_keys.py,sha256=CLMNttwFVloDyAfbvl37ov85o8dt51K94H83C8u7pTM,2314
+tensorflow_estimator/python/estimator/canned/timeseries/head.py,sha256=Q-JPsPxRn_dNTB3sBYGUFK5_g6qC9KVMeIQqniMFLOc,21379
+tensorflow_estimator/python/estimator/canned/timeseries/math_utils.py,sha256=fbrrJqsS3oMN3mD8-Gmd0o2CsMci3-1PgeCjLSJC1wU,19761
+tensorflow_estimator/python/estimator/canned/timeseries/model.py,sha256=AbpPj-xlA79JXsqLLbE6dT5vSAvi58ZerCiPcrXnKII,14441
+tensorflow_estimator/python/estimator/canned/timeseries/model_utils.py,sha256=Sw3s8gDh-84RGNNHyYFF6sIw77arm90A-mD0xlOOcx8,3212
+tensorflow_estimator/python/estimator/canned/timeseries/saved_model_utils.py,sha256=FTBGOgJmvhOIRXvKfRQ7SBHkeGhZk22273dqkgowuFc,14941
+tensorflow_estimator/python/estimator/canned/timeseries/state_management.py,sha256=qnzTQulz0s28On9BqZl4RNGSY81foVLtknRd35m-mvI,4004
+tensorflow_estimator/python/estimator/canned/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/canned/v1/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/v1/__pycache__/dnn_testing_utils_v1.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/v1/__pycache__/linear_testing_utils_v1.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/canned/v1/dnn_testing_utils_v1.py,sha256=Vv0kdjPydmGBia4iNcicUfc1PsfFLPYAY080WBqTYPU,82639
+tensorflow_estimator/python/estimator/canned/v1/linear_testing_utils_v1.py,sha256=UuakiSqhGicg0N_1kZ_1yiC1VsP85OEa6J0I1-gzjJo,91108
+tensorflow_estimator/python/estimator/early_stopping.py,sha256=NGnPfaL7Yv_HOvHgZh9N1DqvRgcUNhlgF8D4gYpB81k,24051
+tensorflow_estimator/python/estimator/estimator.py,sha256=8F5JPFHVKIfi7R1JdWHbHb7WHoQPfTkiSnti6bACp6A,102225
+tensorflow_estimator/python/estimator/estimator_lib.py,sha256=Pox9S-OOl6A2FJcKAP_n86LqV9chovopq-q9Cj60_0o,5069
+tensorflow_estimator/python/estimator/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/export/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/export/__pycache__/export.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/export/__pycache__/export_lib.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/export/__pycache__/export_output.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/export/__pycache__/function.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/export/export.py,sha256=uxyZL8IVkNEQN6GAcWKfAcAOm9cCf1Qv_WbruQG2Zc4,20630
+tensorflow_estimator/python/estimator/export/export_lib.py,sha256=CyumaeOc_6LrwFyYcyr9NKZiTdI9uphRcACtKuBa6F0,3055
+tensorflow_estimator/python/estimator/export/export_output.py,sha256=fhLdAtNdJwUxBGLNNrXVHm6neAYbnbimHYWeoKq279o,1902
+tensorflow_estimator/python/estimator/export/function.py,sha256=jJnGyHh1XG0LAOP2dKdw-wMZFmSy4HWg9qP4wUEF-jY,13868
+tensorflow_estimator/python/estimator/exporter.py,sha256=fYG89TOY1xrv6hYiWUETjpZvbQpUM0wR1yyHlS1o6EQ,19851
+tensorflow_estimator/python/estimator/extenders.py,sha256=Y4M8UTY2pFAJHzbMXSr70DzEy9A0GpyXfYUku36dn64,4874
+tensorflow_estimator/python/estimator/gc.py,sha256=u39vcSzNBDuqbdtO_KcHWv6yWIDTsnUDxTKzqcy4nF0,6327
+tensorflow_estimator/python/estimator/head/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/head/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/head/__pycache__/base_head.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/head/__pycache__/binary_class_head.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/head/__pycache__/head_utils.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/head/__pycache__/multi_class_head.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/head/__pycache__/multi_head.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/head/__pycache__/multi_label_head.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/head/__pycache__/regression_head.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/head/__pycache__/sequential_head.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/head/base_head.py,sha256=2BybrLecms5kunaYSYDhat1hTLsAIWj-M-JmRRDwJWY,39763
+tensorflow_estimator/python/estimator/head/binary_class_head.py,sha256=crEIByjSAPvF4BMEXyhBzI4e0gxCECNBfxvQPDyR8To,26799
+tensorflow_estimator/python/estimator/head/head_utils.py,sha256=KqrWIoLqsLgtGt5W3ZKfB38OJhCdfLY-StgotnozBgY,4028
+tensorflow_estimator/python/estimator/head/multi_class_head.py,sha256=3UocuMmxdbT_3T7B7g-yCSZBR_k2w0gWFWrC3jBNbTw,21407
+tensorflow_estimator/python/estimator/head/multi_head.py,sha256=zMm4urGTEHaI8vea7F1T-65FhSh_NYo3oVnBzgUfWBQ,23311
+tensorflow_estimator/python/estimator/head/multi_label_head.py,sha256=YWZxzwq274LgDwscmQeOcFvoSzSjW4wgXgUVvx-lcAE,26692
+tensorflow_estimator/python/estimator/head/regression_head.py,sha256=1WIL1B7fWwNXLG14qWLaGeQ7J4qVgO_y9wJ-FLirzpU,23746
+tensorflow_estimator/python/estimator/head/sequential_head.py,sha256=VkV9Oe42ApfbOkkCCLGQxoCg9xQ8c1T1xXqexDOq9TA,20736
+tensorflow_estimator/python/estimator/hooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/hooks/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/hooks/__pycache__/basic_session_run_hooks.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/hooks/__pycache__/fake_summary_writer.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/hooks/__pycache__/hooks.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/hooks/__pycache__/session_run_hook.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/hooks/basic_session_run_hooks.py,sha256=moxTfzDQpt-9j8nO4WHeOJr-sDoSCT0sgIThAmSsq8U,2813
+tensorflow_estimator/python/estimator/hooks/fake_summary_writer.py,sha256=mcenQ5maQfZnkwx-aXeIsvQliP7h1jQJ4xXtk354cnE,5595
+tensorflow_estimator/python/estimator/hooks/hooks.py,sha256=OzVJUsxgjHvSOgaAjOi2BqyYV6Jxhw7HSqx_zmJp2Ew,11050
+tensorflow_estimator/python/estimator/hooks/session_run_hook.py,sha256=CyI4FUdD0oITSfSw3DxwpwgUbGMHsbfxz6UjYs-ZvJA,4182
+tensorflow_estimator/python/estimator/inputs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/inputs/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/inputs/__pycache__/inputs.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/inputs/__pycache__/numpy_io.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/inputs/__pycache__/pandas_io.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/inputs/inputs.py,sha256=LlrWANUzivltc7re_cWw2GOFjA3Nv3Og6eDHE9_0b4M,1106
+tensorflow_estimator/python/estimator/inputs/numpy_io.py,sha256=il6e8hnta5lFInLsgYrTKiaQZ6In5h8Qg7hWxqZNjh8,7996
+tensorflow_estimator/python/estimator/inputs/pandas_io.py,sha256=XbimU7hWlEJiALnR0MrXf8gnSnAKAzSsHbxFHkfx5tw,5837
+tensorflow_estimator/python/estimator/inputs/queues/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/inputs/queues/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/inputs/queues/__pycache__/feeding_functions.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/inputs/queues/__pycache__/feeding_queue_runner.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/inputs/queues/feeding_functions.py,sha256=gA7rAl6PZyyRCTKnwG46OpYQDVlVUBqIqQXfzBB4Idk,18804
+tensorflow_estimator/python/estimator/inputs/queues/feeding_queue_runner.py,sha256=ncwpuDkE_UT_UoQhqa1iHJwYIyopkAlRNuvGlEOqWEw,6883
+tensorflow_estimator/python/estimator/keras.py,sha256=ez5VtAJKJ-BahgMaoWMUoW52cUHyzUWUQLPuVLC35SU,30837
+tensorflow_estimator/python/estimator/mode_keys.py,sha256=N2FaqYgjkEGxbBlLrX5ZRRcFQpwCSEftzac5zB-4jO4,1058
+tensorflow_estimator/python/estimator/model_fn.py,sha256=rbk0POVz51-_vrHqLAOzn6xPPfDVOu7yjyH68JKDXgs,25018
+tensorflow_estimator/python/estimator/run_config.py,sha256=-uxcTYkdguIK-oeKeDqa4yhXQ93hXKysiUcaxIbVf7A,38307
+tensorflow_estimator/python/estimator/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/tools/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tools/__pycache__/analytics.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tools/__pycache__/checkpoint_converter.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tools/analytics.py,sha256=6u-GRvFu3eYiozLNbl7Aj0IVliDHtOxsnvOuT4k0YvI,1265
+tensorflow_estimator/python/estimator/tools/checkpoint_converter.py,sha256=j6j7iij04Fefdr9MacQiCU_FtIos6zlH1x3D_XuqkwQ,15165
+tensorflow_estimator/python/estimator/tpu/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tensorflow_estimator/python/estimator/tpu/__pycache__/__init__.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tpu/__pycache__/_tpu_estimator_embedding.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tpu/__pycache__/error_handling.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tpu/__pycache__/iteration_count_estimator.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tpu/__pycache__/tpu_config.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tpu/__pycache__/tpu_context.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tpu/__pycache__/tpu_estimator.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tpu/__pycache__/util.cpython-37.pyc,,
+tensorflow_estimator/python/estimator/tpu/_tpu_estimator_embedding.py,sha256=Qgv5e76xl0y1SF9tEmdGLRXN2VnkTdamA_7AOpHZwGk,28343
+tensorflow_estimator/python/estimator/tpu/error_handling.py,sha256=brmHZ_LB3uGfKmWNcMAbj9GSaOjHCMXKdp8tmr_uwyA,5327
+tensorflow_estimator/python/estimator/tpu/iteration_count_estimator.py,sha256=-6F6Ap1jv3ukv7_S0G50LgjbTfd-lI7FqxhUoyvL63U,7833
+tensorflow_estimator/python/estimator/tpu/tpu_config.py,sha256=iekrar_q5MT5Cw7IGK7GV5fNI7VKF69BEd5iOGJAoQA,14839
+tensorflow_estimator/python/estimator/tpu/tpu_context.py,sha256=j8XXHBt181fOWi2JVJ3-I04TZm2VltUJZLh7KpMiH_A,34489
+tensorflow_estimator/python/estimator/tpu/tpu_estimator.py,sha256=QD5023BciXKE1SIJYFxKvdzndEGCXtDdtKeh2qQI80E,185657
+tensorflow_estimator/python/estimator/tpu/util.py,sha256=tpB0jmmBzcCZPF5n7Rc3EsPdBXKtTeenl_UhlGJUhWw,3607
+tensorflow_estimator/python/estimator/training.py,sha256=WePVJKfpFe0-cFpLnJKxmV8oipLWWhAItT4586LLbUk,44030
+tensorflow_estimator/python/estimator/util.py,sha256=UnYBuuVtqWIzaFrJ1HOUU1jITE3rEVptPQzO4-VG10U,4170
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/WHEEL b/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/WHEEL
new file mode 100644
index 00000000..01b8fc7d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/top_level.txt
new file mode 100644
index 00000000..a909c9e7
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator-2.5.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+tensorflow_estimator
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/__init__.py
new file mode 100644
index 00000000..7e5b9956
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/__init__.py
@@ -0,0 +1,19 @@
+# This file is MACHINE GENERATED! Do not edit.
+# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
+"""Public API for tf. namespace.
+"""
+
+from __future__ import print_function as _print_function
+
+import sys as _sys
+
+from tensorflow_estimator._api.v1 import estimator
+
+del _print_function
+
+from tensorflow.python.util import module_wrapper as _module_wrapper
+
+if not isinstance(_sys.modules[__name__], _module_wrapper.TFModuleWrapper):
+  _sys.modules[__name__] = _module_wrapper.TFModuleWrapper(
+      _sys.modules[__name__], "", public_apis=None, deprecation=True,
+      has_lite=False)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/baseline.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/baseline.py
new file mode 100644
index 00000000..01b518e9
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/baseline.py
@@ -0,0 +1,651 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Baseline estimators.
+
+Baseline estimators are bias-only estimators that can be used for debugging
+and as simple baselines.
+
+Example:
+
+```
+# Build BaselineClassifier
+classifier = BaselineClassifier(n_classes=3)
+
+# Input builders
+def input_fn_train():
+  # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+  # index.
+  pass
+
+def input_fn_eval():
+  # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+  # index.
+  pass
+
+# Fit model.
+classifier.train(input_fn=input_fn_train)
+
+# Evaluate cross entropy between the test and train labels.
+loss = classifier.evaluate(input_fn=input_fn_eval)["loss"]
+
+# predict outputs the probability distribution of the classes as seen in
+# training.
+predictions = classifier.predict(new_samples)
+```
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import six
+import tensorflow as tf
+from tensorflow.python.feature_column import feature_column as feature_column_v1
+from tensorflow.python.feature_column import feature_column_v2
+from tensorflow.python.framework import ops
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator.canned import head as head_lib
+from tensorflow_estimator.python.estimator.canned import optimizers
+from tensorflow_estimator.python.estimator.head import head_utils
+from tensorflow_estimator.python.estimator.head import regression_head
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+# The default learning rate of 0.3 is a historical artifact of the initial
+# implementation, but seems a reasonable choice.
+_LEARNING_RATE = 0.3
+
+
+def _get_weight_column_key(weight_column):
+  if weight_column is None:
+    return None
+  if isinstance(weight_column, six.string_types):
+    return weight_column
+  if not isinstance(weight_column, feature_column_v1._NumericColumn):  # pylint: disable=protected-access
+    raise TypeError('Weight column must be either a string or _NumericColumn.'
+                    ' Given type: {}.'.format(type(weight_column)))
+  return weight_column.key
+
+
+def _get_weight_column_key_v2(weight_column):
+  if weight_column is None:
+    return None
+  if isinstance(weight_column, six.string_types):
+    return weight_column
+  if not isinstance(weight_column, feature_column_v2.NumericColumn):
+    raise TypeError('Weight column must be either a string or NumericColumn. '
+                    'Given type: {}.'.format(type(weight_column)))
+  return weight_column.key
+
+
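+# Illustrative sketch (ours, not part of the upstream file): both a plain
+# string and a v2 NumericColumn resolve to the same weight key.
+# `_example_weight_key` is a hypothetical helper added for exposition only;
+# it is never called at import time.
+def _example_weight_key():
+  assert _get_weight_column_key_v2('w') == 'w'
+  col = tf.feature_column.numeric_column('w')  # a v2 NumericColumn
+  assert _get_weight_column_key_v2(col) == 'w'
+
+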
+def _get_batch_size_and_size_checks(features, weight_column_key):
+  """Returns batch_size and size_checks."""
+  size_checks = []
+  batch_size = None
+
+  # The first dimension is assumed to be a batch size and must be consistent
+  # among all of the features.
+  for key, feature in features.items():
+    # Skip weight_column to ensure we don't add size checks to it.
+    # These would introduce a dependency on the weight at serving time.
+    if key == weight_column_key:
+      continue
+    first_dim = tf.compat.v1.shape(feature)[0]
+    if batch_size is None:
+      batch_size = first_dim
+    else:
+      size_checks.append(
+          tf.compat.v1.debugging.assert_equal(batch_size, first_dim))
+
+  return size_checks, batch_size
+
+
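+# Illustrative sketch (ours, not upstream): two rank-2 features sharing a
+# batch size of 4 yield one equality assertion between their first dimensions
+# plus the shared batch-size tensor. `_example_size_checks` is a hypothetical
+# helper for exposition only.
+def _example_size_checks():
+  features = {'a': tf.zeros([4, 1]), 'b': tf.zeros([4, 2])}
+  size_checks, batch_size = _get_batch_size_and_size_checks(
+      features, weight_column_key=None)
+  return size_checks, batch_size  # one assert op; batch_size tensor of 4
+
+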
+def _baseline_logit_fn_builder(num_outputs, weight_column=None):
+  """Function builder for a baseline logit_fn.
+
+  Args:
+    num_outputs: Number of outputs for the model.
+    weight_column: A string or a `_NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It will be multiplied by the loss of the example.
+
+  Returns:
+    A logit_fn (see below).
+  """
+
+  def baseline_logit_fn(features):
+    """Baseline model logit_fn.
+
+    The baseline model simply learns a bias, so the output logits are a
+    `Variable` with one weight for each output that learns the bias for the
+    corresponding output.
+
+    Args:
+      features: The first item returned from the `input_fn` passed to `train`,
+        `evaluate`, and `predict`. This should be a single `Tensor` or dict with
+        `Tensor` values.
+
+    Returns:
+      A `Tensor` representing the logits.
+    """
+    weight_column_key = _get_weight_column_key(weight_column)
+    size_checks, batch_size = _get_batch_size_and_size_checks(
+        features, weight_column_key)
+    with tf.control_dependencies(size_checks):
+      with tf.compat.v1.variable_scope('baseline'):
+        bias = tf.compat.v1.get_variable(
+            'bias',
+            shape=[num_outputs],
+            initializer=tf.compat.v1.initializers.zeros)
+        return tf.math.multiply(bias, tf.ones([batch_size, num_outputs]))
+
+  return baseline_logit_fn
+
+
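+# Illustrative sketch (ours, not upstream): the baseline logits are just the
+# learned bias broadcast over the batch. For a bias of [0.1, 0.2, 0.3] and a
+# batch of 2, the multiply below yields a [2, 3] tensor with identical rows.
+# `_example_broadcast_bias` is a hypothetical helper for exposition only.
+def _example_broadcast_bias():
+  bias = tf.constant([0.1, 0.2, 0.3])
+  return tf.math.multiply(bias, tf.ones([2, 3]))  # shape [2, 3]
+
+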
+def _baseline_model_fn(features,
+                       labels,
+                       mode,
+                       head,
+                       optimizer,
+                       weight_column=None,
+                       config=None):
+  """Model_fn for baseline models.
+
+  Args:
+    features: `Tensor` or dict of `Tensor` (depends on data passed to `train`).
+    labels: `Tensor` of labels that are compatible with the `Head` instance.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `Head` instance.
+    optimizer: String, `tf.Optimizer` object, or callable that creates the
+      optimizer to use for training. If not specified, will use `FtrlOptimizer`
+      with a default learning rate of 0.3.
+    weight_column: A string or a `_NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It will be multiplied by the loss of the example.
+    config: `RunConfig` object to configure the runtime settings.
+
+  Raises:
+    KeyError: If weight column is specified but not present.
+    ValueError: If features is an empty dictionary.
+
+  Returns:
+    An `EstimatorSpec` instance.
+  """
+  del config  # Unused.
+
+  logit_fn = _baseline_logit_fn_builder(head.logits_dimension, weight_column)
+  logits = logit_fn(features)
+
+  def train_op_fn(loss):
+    opt = optimizers.get_optimizer_instance(
+        optimizer, learning_rate=_LEARNING_RATE)
+    return opt.minimize(loss, global_step=tf.compat.v1.train.get_global_step())
+
+  return head.create_estimator_spec(
+      features=features,
+      mode=mode,
+      logits=logits,
+      labels=labels,
+      train_op_fn=train_op_fn)
+
+
+def _baseline_model_fn_builder_v2(features, num_outputs, weight_column=None):
+  """Function builder for a baseline logit_fn.
+
+  Args:
+    features: The first item returned from the `input_fn` passed to `train`,
+      `evaluate`, and `predict`. This should be a single `Tensor` or dict with
+      `Tensor` values.
+    num_outputs: Number of outputs for the model.
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It will be multiplied by the loss of the example.
+
+  Returns:
+    A list of trainable variables and a `Tensor` representing the logits.
+  """
+  weight_column_key = _get_weight_column_key_v2(weight_column)
+  size_checks, batch_size = _get_batch_size_and_size_checks(
+      features, weight_column_key)
+  with tf.control_dependencies(size_checks):
+    with ops.name_scope('baseline'):
+      bias = tf.Variable(initial_value=tf.zeros([num_outputs]), name='bias')
+      logits = tf.math.multiply(bias, tf.ones([batch_size, num_outputs]))
+  return [bias], logits
+
+
+def _baseline_model_fn_v2(
+    features,
+    labels,
+    mode,
+    head,
+    optimizer,
+    weight_column=None,
+    config=None,
+    loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE):
+  """Model_fn for baseline models.
+
+  Args:
+    features: `Tensor` or dict of `Tensor` (depends on data passed to `train`).
+    labels: `Tensor` of labels that are compatible with the `Head` instance.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `Head` instance.
+    optimizer: String, `tf.Optimizer` object, or callable that creates the
+      optimizer to use for training. If not specified, will use `FtrlOptimizer`
+      with a default learning rate of 0.3.
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It will be multiplied by the loss of the example.
+    config: `RunConfig` object to configure the runtime settings.
+    loss_reduction: One of `tf.keras.losses.Reduction` except `NONE`. Describes
+      how to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+
+  Raises:
+    KeyError: If weight column is specified but not present.
+    ValueError: If features is an empty dictionary.
+
+  Returns:
+    An `EstimatorSpec` instance.
+  """
+  del config  # Unused.
+
+  trainable_variables, logits = _baseline_model_fn_builder_v2(
+      features, head.logits_dimension, weight_column)
+
+  # In TRAIN mode, create the optimizer and assign the global_step variable
+  # to optimizer.iterations so that global_step increases correctly, as hooks
+  # rely on the global step as a step counter.
+  if mode == ModeKeys.TRAIN:
+    opt = optimizers.get_optimizer_instance_v2(
+        optimizer, learning_rate=_LEARNING_RATE)
+    opt.iterations = tf.compat.v1.train.get_or_create_global_step()
+
+  def train_op_fn(loss):
+    # Scale loss by number of replicas.
+    if loss_reduction == losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE:
+      loss = losses_utils.scale_loss_for_distribution(loss)
+    return opt.get_updates(loss, trainable_variables)[0]
+
+  return head.create_estimator_spec(
+      features=features,
+      mode=mode,
+      logits=logits,
+      labels=labels,
+      train_op_fn=train_op_fn)
+
+
+@estimator_export('estimator.BaselineClassifier', v1=[])
+class BaselineClassifierV2(estimator.EstimatorV2):
+  """A classifier that can establish a simple baseline.
+
+  This classifier ignores feature values and will learn to predict the average
+  value of each label. For single-label problems, this will predict the
+  probability distribution of the classes as seen in the labels. For multi-label
+  problems, this will predict the fraction of examples that are positive for
+  each class.
+
+  Example:
+
+  ```python
+
+  # Build BaselineClassifier
+  classifier = tf.estimator.BaselineClassifier(n_classes=3)
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+
+  # Fit model.
+  classifier.train(input_fn=input_fn_train)
+
+  # Evaluate cross entropy between the test and train labels.
+  loss = classifier.evaluate(input_fn=input_fn_eval)["loss"]
+
+  # predict outputs the probability distribution of the classes as seen in
+  # training.
+  predictions = classifier.predict(new_samples)
+
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+    otherwise there will be a `KeyError`:
+
+  * if `weight_column` is not `None`, a feature with
+     `key=weight_column` whose value is a `Tensor`.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               model_dir=None,
+               n_classes=2,
+               weight_column=None,
+               label_vocabulary=None,
+               optimizer='Ftrl',
+               config=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE):
+    """Initializes a BaselineClassifier instance.
+
+    Args:
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      n_classes: number of label classes. Default is binary classification.
+        It must be greater than 1. Note: Class labels are integers representing
+          the class index (i.e. values from 0 to n_classes-1). For arbitrary
+          label values (e.g. string labels), convert to class indices first.
+      weight_column: A string or a `NumericColumn` created by
+        `tf.feature_column.numeric_column` defining feature column representing
+        weights. It will be multiplied by the loss of the example.
+      label_vocabulary: Optional list of strings with size `[n_classes]`
+        defining the label vocabulary. Only supported for `n_classes` > 2.
+      optimizer: String, `tf.keras.optimizers.*` object, or callable that
+        creates the optimizer to use for training. If not specified, will use
+        `Ftrl` as the default optimizer.
+      config: `RunConfig` object to configure the runtime settings.
+      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+        to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+
+    Returns:
+      A `BaselineClassifier` estimator.
+
+    Raises:
+      ValueError: If `n_classes` < 2.
+    """
+    head = head_utils.binary_or_multi_class_head(
+        n_classes,
+        weight_column=weight_column,
+        label_vocabulary=label_vocabulary,
+        loss_reduction=loss_reduction)
+
+    def _model_fn(features, labels, mode, config):
+      return _baseline_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          optimizer=optimizer,
+          weight_column=weight_column,
+          config=config,
+          loss_reduction=loss_reduction)
+
+    super(BaselineClassifierV2, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config)
+
+
+@estimator_export(v1=['estimator.BaselineClassifier'])  # pylint: disable=missing-docstring
+class BaselineClassifier(estimator.Estimator):
+  __doc__ = BaselineClassifierV2.__doc__.replace('SUM_OVER_BATCH_SIZE', 'SUM')
+
+  def __init__(self,
+               model_dir=None,
+               n_classes=2,
+               weight_column=None,
+               label_vocabulary=None,
+               optimizer='Ftrl',
+               config=None,
+               loss_reduction=tf.compat.v1.losses.Reduction.SUM):
+    head = head_lib._binary_logistic_or_multi_class_head(  # pylint: disable=protected-access
+        n_classes, weight_column, label_vocabulary, loss_reduction)
+
+    def _model_fn(features, labels, mode, config):
+      return _baseline_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          optimizer=optimizer,
+          weight_column=weight_column,
+          config=config)
+
+    super(BaselineClassifier, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config)
+
+
+@estimator_export('estimator.BaselineEstimator', v1=[])
+class BaselineEstimatorV2(estimator.EstimatorV2):
+  """An estimator that can establish a simple baseline.
+
+  The estimator uses a user-specified head.
+
+  This estimator ignores feature values and will learn to predict the average
+  value of each label. E.g. for single-label classification problems, this will
+  predict the probability distribution of the classes as seen in the labels.
+  For multi-label classification problems, it will predict the ratio of examples
+  that contain each class.
+
+  Example:
+
+  ```python
+
+  # Build baseline multi-label classifier.
+  estimator = tf.estimator.BaselineEstimator(
+      head=tf.estimator.MultiLabelHead(n_classes=3))
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+
+  # Fit model.
+  estimator.train(input_fn=input_fn_train)
+
+  # Evaluates cross entropy between the test and train labels.
+  loss = estimator.evaluate(input_fn=input_fn_eval)["loss"]
+
+  # For each class, predicts the ratio of training examples that contain the
+  # class.
+  predictions = estimator.predict(new_samples)
+
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+    otherwise there will be a `KeyError`:
+
+  * if `weight_column` is specified (and not None) in the constructor of the
+    `head` passed to BaselineEstimator's constructor, a feature with
+    `key=weight_column` whose value is a `Tensor`.
+  """
+
+  def __init__(self, head, model_dir=None, optimizer='Ftrl', config=None):
+    """Initializes a BaselineEstimator instance.
+
+    Args:
+      head: A `Head` instance constructed with a method such as
+        `tf.estimator.MultiLabelHead`.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      optimizer: String, `tf.keras.optimizers.*` object, or callable that
+        creates the optimizer to use for training. If not specified, will use
+        `Ftrl` as the default optimizer.
+      config: `RunConfig` object to configure the runtime settings.
+    """
+
+    def _model_fn(features, labels, mode, config):
+      return _baseline_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          optimizer=optimizer,
+          config=config)
+
+    super(BaselineEstimatorV2, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config)
+
+
+@estimator_export(v1=['estimator.BaselineEstimator'])  # pylint: disable=missing-docstring
+class BaselineEstimator(estimator.Estimator):
+  __doc__ = BaselineEstimatorV2.__doc__
+
+  def __init__(self, head, model_dir=None, optimizer='Ftrl', config=None):
+
+    def _model_fn(features, labels, mode, config):
+      return _baseline_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          optimizer=optimizer,
+          config=config)
+
+    super(BaselineEstimator, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config)
+
+
+@estimator_export('estimator.BaselineRegressor', v1=[])
+class BaselineRegressorV2(estimator.EstimatorV2):
+  """A regressor that can establish a simple baseline.
+
+  This regressor ignores feature values and will learn to predict the average
+  value of each label.
+
+  Example:
+
+  ```python
+
+  # Build BaselineRegressor
+  regressor = tf.estimator.BaselineRegressor()
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents the target
+    # value.
+    pass
+
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents the target
+    # value.
+    pass
+
+  # Fit model.
+  regressor.train(input_fn=input_fn_train)
+
+  # Evaluate squared-loss between the test and train targets.
+  loss = regressor.evaluate(input_fn=input_fn_eval)["loss"]
+
+  # predict outputs the mean value seen during training.
+  predictions = regressor.predict(new_samples)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+    otherwise there will be a `KeyError`:
+
+  * if `weight_column` is not `None`, a feature with
+     `key=weight_column` whose value is a `Tensor`.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               model_dir=None,
+               label_dimension=1,
+               weight_column=None,
+               optimizer='Ftrl',
+               config=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE):
+    """Initializes a BaselineRegressor instance.
+
+    Args:
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      label_dimension: Number of regression targets per example. This is the
+        size of the last dimension of the labels and logits `Tensor` objects
+        (typically, these have shape `[batch_size, label_dimension]`).
+      weight_column: A string or a `_NumericColumn` created by
+        `tf.feature_column.numeric_column` defining feature column representing
+        weights. It will be multiplied by the loss of the example.
+      optimizer: String, `tf.keras.optimizers.*` object, or callable that
+        creates the optimizer to use for training. If not specified, will use
+        `Ftrl` as the default optimizer.
+      config: `RunConfig` object to configure the runtime settings.
+      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+        to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+
+    Returns:
+      A `BaselineRegressor` estimator.
+    """
+    head = regression_head.RegressionHead(
+        label_dimension=label_dimension,
+        weight_column=weight_column,
+        loss_reduction=loss_reduction)
+
+    def _model_fn(features, labels, mode, config):
+      return _baseline_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          optimizer=optimizer,
+          config=config)
+
+    super(BaselineRegressorV2, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config)
+
+
+@estimator_export(v1=['estimator.BaselineRegressor'])  # pylint: disable=missing-docstring
+class BaselineRegressor(estimator.Estimator):
+  __doc__ = BaselineRegressorV2.__doc__.replace('SUM_OVER_BATCH_SIZE', 'SUM')
+
+  def __init__(self,
+               model_dir=None,
+               label_dimension=1,
+               weight_column=None,
+               optimizer='Ftrl',
+               config=None,
+               loss_reduction=tf.compat.v1.losses.Reduction.SUM):
+    head = head_lib._regression_head(  # pylint: disable=protected-access
+        label_dimension=label_dimension,
+        weight_column=weight_column,
+        loss_reduction=loss_reduction)
+
+    def _model_fn(features, labels, mode, config):
+      return _baseline_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          optimizer=optimizer,
+          config=config)
+
+    super(BaselineRegressor, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/boosted_trees.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/boosted_trees.py
new file mode 100644
index 00000000..c5cd9f9c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/boosted_trees.py
@@ -0,0 +1,2391 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Estimator classes for BoostedTrees."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import abc
+import collections
+import contextlib
+import functools
+
+import numpy as np
+import six
+import tensorflow as tf
+from tensorflow.core.kernels.boosted_trees import boosted_trees_pb2
+from tensorflow.python.feature_column import feature_column as fc_old
+from tensorflow.python.feature_column import feature_column_lib
+from tensorflow.python.feature_column import feature_column_v2
+from tensorflow.python.framework import ops
+from tensorflow.python.ops import boosted_trees_ops
+from tensorflow.python.ops import cond_v2
+from tensorflow.python.ops import lookup_ops
+from tensorflow.python.ops.array_ops import identity as tf_identity
+from tensorflow.python.ops.parallel_for import gradients as parallel_for_gradients
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator.canned import boosted_trees_utils
+from tensorflow_estimator.python.estimator.canned import head as head_lib
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+# TODO(nponomareva): Reveal pruning params here.
+_TreeHParams = collections.namedtuple('TreeHParams', [
+    'n_trees', 'max_depth', 'learning_rate', 'l1', 'l2', 'tree_complexity',
+    'min_node_weight', 'center_bias', 'pruning_mode', 'quantile_sketch_epsilon'
+])
+
+_DUMMY_NUM_BUCKETS = -1
+_DUMMY_NODE_ID = -1
+_EQUALITY_SPLIT = 'equality'
+_INEQUALITY_SPLIT = 'inequality'
+_QUANTILE_ACCUMULATOR_RESOURCE_NAME = 'QuantileAccumulator'
+
+
+def _is_numeric_column(feature_column):
+  """Returns True if column is a continuous numeric that should be bucketized."""
+  # These columns always produce categorical integers and do not require
+  # additional bucketization.
+  if isinstance(
+      feature_column,
+      (
+          feature_column_lib.CategoricalColumn,
+          fc_old._CategoricalColumn,  # pylint:disable=protected-access
+          feature_column_lib.BucketizedColumn,
+          fc_old._BucketizedColumn,  # pylint:disable=protected-access
+          feature_column_lib.IndicatorColumn,
+          fc_old._IndicatorColumn)):  # pylint:disable=protected-access
+    return False
+  # NumericColumns are always interpreted as continuous numerics.
+  if isinstance(feature_column,
+                (feature_column_lib.NumericColumn, fc_old._NumericColumn)):
+    return True
+  # For other dense columns, the dtype is used.
+  if isinstance(feature_column,
+                (feature_column_lib.DenseColumn, fc_old._DenseColumn)):
+    # NOTE: GBDT requires that all DenseColumns expose a dtype attribute
+    return feature_column.dtype.is_floating
+  else:
+    raise ValueError('Encountered unexpected column {}'.format(feature_column))
+
+
+def _get_feature_dimensions(feature_ids_list, input_feature_list):
+  """Get feature dimensions.
+
+  Args:
+    feature_ids_list: a list of lists of feature ids for each bucket size.
+    input_feature_list: a list of input feature tensors.
+
+  Returns:
+    feature_dimensions: a list of dimensions for each feature.
+  """
+  # TODO(crawles): group feature dimensions by bucket (similar to feature_ids).
+  feature_dimensions = []
+  for feature_ids in feature_ids_list:
+    for feature_id in feature_ids:
+      feature_dimensions.append(input_feature_list[feature_id].shape[1])
+  return feature_dimensions
+
+
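+# Illustrative sketch (ours, not upstream): with feature ids grouped as
+# [[0], [1]] and two rank-2 inputs of widths 1 and 3, the dimensions come
+# back as [1, 3]. `_example_feature_dims` is a hypothetical helper for
+# exposition only.
+def _example_feature_dims():
+  inputs = [tf.zeros([2, 1]), tf.zeros([2, 3])]
+  return _get_feature_dimensions([[0], [1]], inputs)  # [1, 3]
+
+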
+def _get_float_feature_columns(sorted_feature_columns):
+  """Get float feature columns.
+
+  Args:
+    sorted_feature_columns: a list of feature columns sorted by name.
+
+  Returns:
+    float_columns: a list of float feature columns sorted by name.
+  """
+  float_columns = []
+  for feature_column in sorted_feature_columns:
+    if _is_numeric_column(feature_column):
+      float_columns.append(feature_column)
+  return float_columns
+
+
+def _apply_feature_transformations(features, feature_columns):
+  """Applies feature column transformations to the provided features.
+
+  Supports V1 and V2 FeatureColumns.
+
+  Args:
+    features: a dictionary of feature name to Tensor.
+    feature_columns: an iterable of tf.feature_columns.
+
+  Returns:
+    A dict from feature_column to transformed feature tensor.
+  """
+  v2_columns, v1_columns = [], []
+  for fc in feature_columns:
+    if feature_column_lib.is_feature_column_v2([fc]):
+      v2_columns.append(fc)
+    else:
+      v1_columns.append(fc)
+
+  if v2_columns:
+    state_manager = feature_column_v2._StateManagerImpl(
+        layer=None, trainable=False)
+
+    transformed_columns = feature_column_v2._transform_features_v2(
+        features, v2_columns, state_manager)
+  else:
+    transformed_columns = {}
+  if v1_columns:
+    transformed_columns.update(fc_old._transform_features(features, v1_columns))
+  return transformed_columns
+
+
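+# Illustrative sketch (ours, not upstream): transforming a bucketized column
+# maps raw values to bucket indices, keyed by the column object itself.
+# `_example_transform` is a hypothetical helper for exposition only.
+def _example_transform():
+  col = tf.feature_column.bucketized_column(
+      tf.feature_column.numeric_column('x'), boundaries=[0.0, 1.0])
+  transformed = _apply_feature_transformations(
+      {'x': tf.constant([[-1.0], [0.5], [2.0]])}, [col])
+  return transformed[col]  # bucket ids [[0], [1], [2]]
+
+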
+def _get_transformed_features(
+    features,
+    sorted_feature_columns,
+    bucket_boundaries_dict=None,
+):
+  """Gets the transformed features from features/feature_columns pair.
+
+  Args:
+    features: a dictionary of name to Tensor.
+    sorted_feature_columns: a list/set of tf.feature_column, sorted by name.
+    bucket_boundaries_dict: a dict of name to list of Tensors.
+
+  Returns:
+    result_features: a list of the transformed features, sorted by the name.
+
+  Raises:
+    ValueError: when unsupported features/columns are tried.
+  """
+  return _get_transformed_features_and_merge_with_previously_transformed(
+      features, sorted_feature_columns, sorted_feature_columns,
+      bucket_boundaries_dict)
+
+
+def _get_transformed_features_and_merge_with_previously_transformed(
+    features,
+    sorted_feature_columns,
+    all_sorted_columns,
+    bucket_boundaries_dict=None,
+    already_transformed_features={},
+):
+  """Gets the transformed features from features/feature_columns pair.
+
+  This signature allows to pass in previously transformed features.
+
+  Args:
+    features: a dictionary of name to Tensor.
+    sorted_feature_columns: a list/set of tf.feature_column, sorted by name, to
+      be used for transforming features.
+    all_sorted_columns: a total list of feature columns, including those that
+      were already used for transformation.
+    bucket_boundaries_dict: a dict of name to list of Tensors.
+    already_transformed_features: features that were already transformed (for
+      the columns in all_sorted_columns that are not in sorted_feature_columns).
+
+  Returns:
+    result_features: a list of the transformed features, sorted by the name.
+
+  Raises:
+    ValueError: when unsupported features/columns are tried.
+  """
+  # pylint:disable=protected-access
+  transformed_features = _apply_feature_transformations(features,
+                                                        sorted_feature_columns)
+  result_features = []
+
+  if sorted_feature_columns != all_sorted_columns:
+    # Add previously transformed features.
+    transformed_features.update(already_transformed_features)
+
+  for column in all_sorted_columns:
+    if isinstance(
+        column,
+        (feature_column_lib.BucketizedColumn, fc_old._BucketizedColumn)):
+      source_name = column.source_column.name
+      bucketized_feature = transformed_features[column]
+      if len(bucketized_feature.shape) > 2:
+        raise ValueError('For now, only supports features equivalent to rank 2 '
+                         'but column `{}` got: {}'.format(
+                             source_name, features[source_name].shape))
+      result_features.append(bucketized_feature)
+    elif isinstance(
+        column, (feature_column_lib.IndicatorColumn, fc_old._IndicatorColumn)):
+      source_name = column.categorical_column.name
+      tensor = tf.cast(transformed_features[column], dtype=tf.dtypes.int32)
+      if len(tensor.shape) > 2:
+        raise ValueError('Rank of indicator column must be no more than 2, '
+                         'but column `{}` got: {}'.format(
+                             source_name, features[source_name].shape))
+      # TODO(nponomareva): consider treating as one, multi-dimensional feature
+      # now that multi-dimensional features are supported.
+      unstacked = [
+          tf.compat.v1.expand_dims(t, axis=1)
+          for t in tf.unstack(tensor, axis=1)
+      ]
+      result_features.extend(unstacked)
+    elif isinstance(column,
+                    (feature_column_lib.DenseColumn, fc_old._DenseColumn)):
+      source_name = column.name
+      tensor = transformed_features[column]
+      # TODO(tanzheny): Add support for multi dim with rank > 1
+      if _get_variable_shape(column).rank > 1:
+        raise ValueError('For now, we only support Dense column with rank of '
+                         '1, but column `{}` got: {}'.format(
+                             source_name, column.variable_shape))
+      # TODO(nponomareva): consider treating as one, multi-dimensional feature
+      # now that multi-dimensional features are supported.
+      if not bucket_boundaries_dict:
+        unstacked = [
+            tf.compat.v1.expand_dims(t, axis=1)
+            for t in tf.unstack(tensor, axis=1)
+        ]
+        result_features.extend(unstacked)
+      else:
+        unstacked = tf.unstack(tensor, axis=1)
+        assert source_name in bucket_boundaries_dict
+        num_float_features = (
+            _get_variable_shape(column)[0]
+            if _get_variable_shape(column).as_list() else 1)
+        assert num_float_features == len(bucket_boundaries_dict[source_name])
+        bucketized = boosted_trees_ops.boosted_trees_bucketize(
+            unstacked, bucket_boundaries_dict[source_name])
+        expanded = [tf.compat.v1.expand_dims(t, axis=1) for t in bucketized]
+        result_features.extend(expanded)
+    elif isinstance(
+        column,
+        (feature_column_lib.CategoricalColumn, fc_old._CategoricalColumn)):
+      # SparseTensor(indices=Tensor(..., shape= [?, 2]),
+      # values=Tensor(..., shape=[?]), dense_shape=Tensor([batch_size, 1],...)))
+      sparse_feature = transformed_features[column]
+      dense = tf.cast(tf.sparse.to_dense(sparse_feature), tf.dtypes.int32)
+      result_features.append(dense)
+    else:
+      raise ValueError('Got unexpected feature column type: {}'.format(column))
+    # pylint:enable=protected-access
+
+  return result_features
+
+
+def _variable(initial_value, trainable=False, name=None):
+  """Stores a tensor as a local Variable for faster read."""
+  if tf.compat.forward_compatible(2019, 8, 8):
+    return tf.compat.v1.Variable(
+        initial_value=initial_value,
+        trainable=trainable,
+        validate_shape=False,
+        name=name,
+        use_resource=True)
+  return tf.compat.v1.Variable(
+      initial_value=initial_value,
+      trainable=trainable,
+      validate_shape=False,
+      name=name,
+      use_resource=False)
+
+
+def _group_features_by_num_buckets_and_split_type(sorted_feature_columns,
+                                                  num_quantiles):
+  """Group feature ids by number of buckets; determine when to equality split.
+
+  Derives the feature ids based on iterating through ordered feature columns
+  and groups them by the number of buckets each feature require. Returns a
+  sorted list of buckets, a list of lists of feature ids for each of those
+  buckets, and a list of lists containing split types for each feature.
+
+  Args:
+    sorted_feature_columns: a list/set of tf.feature_column sorted by name.
+    num_quantiles: int representing the number of quantile buckets for all
+      numeric columns.
+
+  Returns:
+    bucket_size_list: a list of required bucket sizes.
+    feature_ids_list: a list of lists of feature ids for each bucket size.
+    split_types_list: a list of lists indicating feature split type
+
+  Raises:
+    ValueError: when unsupported features columns are provided.
+  """
+  bucket_size_to_f_ids_and_splits = collections.OrderedDict()
+
+  # TODO(nponomareva) for now we preserve the previous functionality and bucket
+  # all numeric into the same num of buckets. Can be easily changed to using
+  # each numeric's real buckets num, but we need to test that it does not cause
+  # a performance hit.
+
+  max_buckets_for_bucketized = 2
+  max_buckets_for_indicator = 2
+  # We will replace this dummy key with the real max after we calculate it.
+  bucket_size_to_f_ids_and_splits.setdefault(_DUMMY_NUM_BUCKETS, [])
+
+  feature_idx = 0
+  # pylint:disable=protected-access
+  for column in sorted_feature_columns:
+    if isinstance(
+        column, (feature_column_lib.IndicatorColumn, fc_old._IndicatorColumn)):
+      num_categorical_features = column.categorical_column._num_buckets
+      for _ in range(num_categorical_features):
+        # We use bucket size of 2 for categorical.
+        bucket_size_to_f_ids_and_splits.setdefault(max_buckets_for_indicator,
+                                                   []).append(
+                                                       (feature_idx,
+                                                        _INEQUALITY_SPLIT))
+        feature_idx += 1
+    elif isinstance(
+        column,
+        (feature_column_lib.BucketizedColumn, fc_old._BucketizedColumn)):
+      max_buckets_for_bucketized = max(max_buckets_for_bucketized,
+                                       len(column.boundaries) + 1)
+      bucket_size_to_f_ids_and_splits[_DUMMY_NUM_BUCKETS].append(
+          (feature_idx, _INEQUALITY_SPLIT))
+      feature_idx += 1
+    elif isinstance(column,
+                    (feature_column_lib.DenseColumn, fc_old._DenseColumn)):
+      num_float_features = _get_variable_shape(
+          column)[0] if _get_variable_shape(column).as_list() else 1
+      for _ in range(num_float_features):
+        bucket_size_to_f_ids_and_splits.setdefault(num_quantiles, []).append(
+            (feature_idx, _INEQUALITY_SPLIT))
+        feature_idx += 1
+    elif isinstance(
+        column,
+        (feature_column_lib.CategoricalColumn, fc_old._CategoricalColumn)):
+      bucket_size_to_f_ids_and_splits.setdefault(column.num_buckets, [])
+      bucket_size_to_f_ids_and_splits[column.num_buckets].append(
+          (feature_idx, _EQUALITY_SPLIT))
+      feature_idx += 1
+    else:
+      raise ValueError('Got unexpected feature column type: {}'.format(column))
+
+  # Replace the dummy key with the real max num of buckets for all bucketized
+  # columns.
+  bucketized_feature_ids = bucket_size_to_f_ids_and_splits[_DUMMY_NUM_BUCKETS]
+  if max_buckets_for_bucketized in bucket_size_to_f_ids_and_splits:
+    bucket_size_to_f_ids_and_splits[max_buckets_for_bucketized].extend(
+        bucketized_feature_ids)
+  elif bucketized_feature_ids:
+    bucket_size_to_f_ids_and_splits[
+        max_buckets_for_bucketized] = bucketized_feature_ids
+  del bucket_size_to_f_ids_and_splits[_DUMMY_NUM_BUCKETS]
+
+  # pylint:enable=protected-access
+  bucket_size_list = []
+  feature_ids_list = []
+  split_types_list = []
+  for bucket, f_ixs_and_splits in bucket_size_to_f_ids_and_splits.items():
+    bucket_size_list.append(bucket)
+    feature_ids, split_types = zip(*f_ixs_and_splits)
+    feature_ids_list.append(feature_ids)
+    split_types_list.append(split_types)
+  return bucket_size_list, feature_ids_list, split_types_list
+
+
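+# Illustrative sketch (ours, not upstream): a bucketized column with 9
+# boundaries and a plain numeric column under num_quantiles=100 group into
+# two bucket sizes, 100 for the numeric feature and 10 for the bucketized
+# one, both marked for inequality splits. `_example_grouping` is a
+# hypothetical helper for exposition only.
+def _example_grouping():
+  bucketized = tf.feature_column.bucketized_column(
+      tf.feature_column.numeric_column('age'), boundaries=list(range(1, 10)))
+  numeric = tf.feature_column.numeric_column('height')
+  columns = sorted([bucketized, numeric], key=lambda c: c.name)
+  return _group_features_by_num_buckets_and_split_type(
+      columns, num_quantiles=100)
+
+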
+def _calculate_num_features(sorted_feature_columns):
+  """Calculate the total number of features."""
+  num_features = 0
+  # pylint:disable=protected-access
+  for column in sorted_feature_columns:
+    if isinstance(
+        column, (fc_old._IndicatorColumn, feature_column_lib.IndicatorColumn)):
+      num_features += column.categorical_column._num_buckets
+    elif isinstance(
+        column,
+        (fc_old._BucketizedColumn, feature_column_lib.BucketizedColumn)):
+      num_features += 1
+    elif isinstance(column,
+                    (feature_column_lib.DenseColumn, fc_old._DenseColumn)):
+      num_features += _get_variable_shape(column)[0] if _get_variable_shape(
+          column).as_list() else 1
+    elif isinstance(
+        column,
+        (feature_column_lib.CategoricalColumn, fc_old._CategoricalColumn)):
+      num_features += 1
+    else:
+      raise ValueError('Got unexpected feature column type: {}'.format(column))
+  # pylint:enable=protected-access
+  return num_features
+
+
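+# Illustrative sketch (ours, not upstream): an indicator over a 3-value
+# vocabulary counts as 3 features and a numeric column as 1, so the total
+# below is 4. `_example_num_features` is a hypothetical helper for
+# exposition only.
+def _example_num_features():
+  ind = tf.feature_column.indicator_column(
+      tf.feature_column.categorical_column_with_vocabulary_list(
+          'gender', ['male', 'female', 'n/a']))
+  num = tf.feature_column.numeric_column('age')
+  return _calculate_num_features(sorted([ind, num], key=lambda c: c.name))  # 4
+
+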
+def _generate_feature_col_name_mapping(sorted_feature_columns):
+  """Return a list of feature column names for feature ids.
+
+    Example:
+
+    ```
+    gender_col = indicator_column(
+        categorical_column_with_vocabulary_list(
+          'gender', ['male', 'female', 'n/a']))
+    # Results in 3 binary features for which we store the mapping to the
+    # original feature column.
+    _generate_feature_col_name_mapping([gender_col])
+    ['gender', 'gender', 'gender']
+    ```
+
+  Args:
+    sorted_feature_columns: a list/set of tf.feature_column sorted by name.
+
+  Returns:
+    feature_col_name_mapping: a list of feature column names indexed by the
+    feature ids.
+
+  Raises:
+    ValueError: when unsupported features/columns are tried.
+  """
+  # pylint:disable=protected-access
+  names = []
+  for column in sorted_feature_columns:
+    if isinstance(
+        column, (feature_column_lib.IndicatorColumn, fc_old._IndicatorColumn)):
+      categorical_column = column.categorical_column
+      if hasattr(categorical_column, 'num_buckets'):
+        one_hot_depth = categorical_column.num_buckets
+      else:
+        assert hasattr(categorical_column, '_num_buckets')
+        one_hot_depth = categorical_column._num_buckets
+      for _ in range(one_hot_depth):
+        names.append(categorical_column.name)
+    elif isinstance(
+        column,
+        (feature_column_lib.BucketizedColumn, fc_old._BucketizedColumn)):
+      names.append(column.name)
+    elif isinstance(column,
+                    (fc_old._DenseColumn, feature_column_lib.DenseColumn)):
+      num_float_features = _get_variable_shape(
+          column)[0] if _get_variable_shape(column).as_list() else 1
+      for _ in range(num_float_features):
+        names.append(column.name)
+    elif isinstance(
+        column,
+        (feature_column_lib.CategoricalColumn, fc_old._CategoricalColumn)):
+      names.append(column.name)
+    else:
+      raise ValueError('Got unexpected feature column type {}'.format(column))
+  # pylint:enable=protected-access
+  return names
+
+
+def _cond(var, true_branch, false_branch, name=None):
+  if tf.compat.forward_compatible(2019, 8, 8):
+    # Always force to use cond v2 (even in v1 setting).
+    return cond_v2.cond_v2(var, true_branch, false_branch, name=name)
+
+  @contextlib.contextmanager
+  def disable_control_flow_v2():
+    control_flow_v2_enabled = tf.compat.v1.control_flow_v2_enabled()
+    tf.compat.v1.disable_control_flow_v2()
+    yield
+    if control_flow_v2_enabled:
+      tf.compat.v1.enable_control_flow_v2()
+
+  with disable_control_flow_v2():
+    return tf.compat.v1.cond(
+        tf.math.logical_and(var, tf.constant(True)),
+        true_branch,
+        false_branch,
+        name=name)
+
+
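+# Editor's usage sketch (hypothetical): _cond behaves like tf.cond but forces
+# the v2 implementation on recent builds; both branches must be callables.
+def _example_cond(flag):
+  # `flag` is assumed to be a scalar bool tensor.
+  return _cond(flag, lambda: tf.constant(1), lambda: tf.constant(0),
+               name='example_cond')
+
+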
+def _accumulator(dtype, shape, shared_name):
+  return tf.compat.v1.ConditionalAccumulator(
+      dtype=dtype, shape=shape, shared_name=shared_name)
+
+
+def _cache_transformed_features(features, sorted_feature_columns, ind_columns,
+                                other_columns, batch_size,
+                                bucket_boundaries_dict, are_boundaries_ready):
+  """Transform features and cache, then returns (cached_features, cache_op)."""
+  num_features = _calculate_num_features(sorted_feature_columns)
+
+  # TODO(crawles): figure out how to get feature dimension here. Right now it's
+  # hard coded as 1.
+  cached_features = [
+      _variable(
+          tf.zeros([batch_size, 1], dtype=tf.dtypes.int32),
+          name='cached_feature_{}'.format(i)) for i in range(num_features)
+  ]
+  are_features_cached = _variable(False, name='are_features_cached')
+
+  # An ugly hack - for indicator features, in order to have lookup tables
+  # initialized, transform should happen outside of cond. So we always transform
+  # ind columns separately (it is not as expensive as bucketizing) and then
+  # merge these processed features with other columns in cond branches.
+  ind_transformed = []
+  if len(ind_columns) > 0:
+    ind_transformed = _apply_feature_transformations(features, ind_columns)
+
+  def get_features_without_cache():
+    """Returns transformed features"""
+    transformed_features = _get_transformed_features_and_merge_with_previously_transformed(
+        features, other_columns, sorted_feature_columns, bucket_boundaries_dict,
+        ind_transformed)
+
+    return transformed_features, tf.no_op()
+
+  def get_features_with_cache():
+    """Either returns from cache or transforms and caches features."""
+
+    def _cache_features_and_return():
+      """Caches transformed features.
+
+      The intention is to hide get_transformed_features() from the graph by
+      caching the result except the first step, since bucketize operation
+      (inside get_transformed_features) is expensive.
+
+      Returns:
+        input_feature_list: a list of input features.
+        cache_flip_op: op to add to the graph to make sure the cache update
+          is included in the graph.
+      """
+      transformed_features = _get_transformed_features_and_merge_with_previously_transformed(
+          features, other_columns, sorted_feature_columns,
+          bucket_boundaries_dict, ind_transformed)
+
+      cached = [
+          tf.compat.v1.assign(cached_features[i], transformed_features[i])
+          for i in range(num_features)
+      ]
+      # TODO(youngheek): Try other combination of dependencies so that the
+      # function returns a single result, not a tuple.
+      with tf.control_dependencies(cached):
+        cache_flip_op = are_features_cached.assign(True)
+      return cached, cache_flip_op
+
+    return _cond(are_features_cached, lambda: (cached_features, tf.no_op()),
+                 _cache_features_and_return)
+
+  input_feature_list, cache_flip_op = _cond(are_boundaries_ready,
+                                            get_features_without_cache,
+                                            get_features_with_cache)
+
+  return input_feature_list, cache_flip_op
+
+
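+# Editor's note on the caching above: on the first step through the cached
+# branch, the transformed features are assigned into the cache variables and
+# are_features_cached flips to True; subsequent steps then read the cached
+# tensors instead of re-running the (expensive) bucketization.
+
+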
+class _CacheTrainingStatesUsingHashTable(object):
+  """Caching logits, etc. using MutableHashTable."""
+
+  def __init__(self, example_ids, logits_dimension):
+    """Creates a cache with the given configuration.
+
+    It maintains a MutableDenseHashTable for all values.
+    The API lookup() and insert() would have those specs,
+      tree_ids: shape=[batch_size], dtype=int32
+      node_ids: shape=[batch_size], dtype=int32
+      logits: shape=[batch_size, logits_dimension], dtype=float32
+    However in the MutableDenseHashTable, ids are bitcasted into float32 and
+    all values are concatenated as a single tensor (of float32).
+
+    Hence conversion happens internally before inserting to the HashTable and
+    after lookup from it.
+
+    Args:
+      example_ids: a Rank 1 tensor to be used as a key of the cache.
+      logits_dimension: a constant (int) for the dimension of logits.
+
+    Raises:
+      ValueError: if example_ids is other than int64 or string.
+    """
+    if tf.dtypes.as_dtype(tf.dtypes.int64).is_compatible_with(
+        example_ids.dtype):
+      empty_key = -1 << 62
+      deleted_key = -1 << 61
+    elif tf.dtypes.as_dtype(tf.dtypes.string).is_compatible_with(
+        example_ids.dtype):
+      empty_key = ''
+      deleted_key = 'NEVER_USED_DELETED_KEY'
+    else:
+      raise ValueError('Unsupported example_id_feature dtype %s.' %
+                       example_ids.dtype)
+    # Cache holds latest <tree_id, node_id, logits> for each example.
+    # tree_id and node_id are both int32 but logits is a float32.
+    # To reduce the overhead, we store all of them together as float32 and
+    # bitcast the ids to int32.
+    self._table_ref = lookup_ops.mutable_dense_hash_table_v2(
+        empty_key=empty_key,
+        deleted_key=deleted_key,
+        value_dtype=tf.dtypes.float32,
+        value_shape=[3])
+    self._example_ids = ops.convert_to_tensor(example_ids)
+    if self._example_ids.shape.ndims not in (None, 1):
+      raise ValueError('example_id should have rank 1, but got %s' %
+                       self._example_ids)
+    self._logits_dimension = logits_dimension
+
+  def lookup(self):
+    """Returns cached_tree_ids, cached_node_ids, cached_logits."""
+    cached_tree_ids, cached_node_ids, cached_logits = tf.split(
+        lookup_ops.lookup_table_find_v2(
+            self._table_ref,
+            self._example_ids,
+            default_value=[0.0, _DUMMY_NODE_ID, 0.0]),
+        [1, 1, self._logits_dimension],
+        axis=1)
+    cached_tree_ids = tf.compat.v1.squeeze(
+        tf.bitcast(cached_tree_ids, tf.dtypes.int32))
+    cached_node_ids = tf.compat.v1.squeeze(
+        tf.bitcast(cached_node_ids, tf.dtypes.int32))
+    if self._example_ids.shape.ndims is not None:
+      cached_logits.set_shape(
+          [self._example_ids.shape[0], self._logits_dimension])
+    return (cached_tree_ids, cached_node_ids, cached_logits)
+
+  def insert(self, tree_ids, node_ids, logits):
+    """Inserts values and returns the op."""
+    insert_op = lookup_ops.lookup_table_insert_v2(
+        self._table_ref, self._example_ids,
+        tf.concat([
+            tf.compat.v1.expand_dims(
+                tf.bitcast(tree_ids, tf.dtypes.float32), 1),
+            tf.compat.v1.expand_dims(
+                tf.bitcast(node_ids, tf.dtypes.float32), 1),
+            logits,
+        ],
+                  axis=1,
+                  name='value_concat_for_cache_insert'))
+    return insert_op
+
+
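+# Editor's illustration of the bitcast trick used by the hash-table cache
+# above (hypothetical helper): int32 ids survive a float32 round trip
+# losslessly because tf.bitcast reinterprets bytes instead of converting
+# values.
+def _example_bitcast_roundtrip():
+  ids = tf.constant([1, 7, 42], dtype=tf.dtypes.int32)
+  as_float = tf.bitcast(ids, tf.dtypes.float32)
+  return tf.bitcast(as_float, tf.dtypes.int32)  # == [1, 7, 42]
+
+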
+class _CacheTrainingStatesUsingVariables(object):
+  """Caching logits, etc. using Variables."""
+
+  def __init__(self, batch_size, logits_dimension):
+    """Creates a cache with the given configuration.
+
+    It maintains three variables, tree_ids, node_ids, logits, for caching.
+      tree_ids: shape=[batch_size], dtype=int32
+      node_ids: shape=[batch_size], dtype=int32
+      logits: shape=[batch_size, logits_dimension], dtype=float32
+
+    Note: this can be used only in the in-memory data setting.
+
+    Args:
+      batch_size: `int`, the size of the cache.
+      logits_dimension: a constant (int) for the dimension of logits.
+    """
+    self._logits_dimension = logits_dimension
+    self._tree_ids = _variable(
+        tf.zeros([batch_size], dtype=tf.dtypes.int32), name='tree_ids_cache')
+    self._node_ids = _variable(
+        _DUMMY_NODE_ID * tf.ones([batch_size], dtype=tf.dtypes.int32),
+        name='node_ids_cache')
+    self._logits = _variable(
+        tf.zeros([batch_size, logits_dimension], dtype=tf.dtypes.float32),
+        name='logits_cache')
+
+  def lookup(self):
+    """Returns cached_tree_ids, cached_node_ids, cached_logits."""
+    return (self._tree_ids, self._node_ids, self._logits)
+
+  def insert(self, tree_ids, node_ids, logits):
+    """Inserts values and returns the op."""
+    return tf.group([
+        self._tree_ids.assign(tree_ids),
+        self._node_ids.assign(node_ids),
+        self._logits.assign(logits)
+    ],
+                    name='cache_insert')
+
+
+class _StopAtAttemptsHook(tf.compat.v1.train.SessionRunHook):
+  """Hook that requests stop at the number of attempts."""
+
+  def __init__(self, num_finalized_trees_tensor, num_attempted_layers_tensor,
+               max_trees, max_depth):
+    self._num_finalized_trees_tensor = num_finalized_trees_tensor
+    self._num_attempted_layers_tensor = num_attempted_layers_tensor
+    self._max_trees = max_trees
+    self._max_depth = max_depth
+
+  def before_run(self, run_context):
+    return tf.compat.v1.train.SessionRunArgs(
+        [self._num_finalized_trees_tensor, self._num_attempted_layers_tensor])
+
+  def after_run(self, run_context, run_values):
+    # num_* tensors should be retrieved in a separate session from the
+    # training one, in order to read the values after growing.
+    # So, if we are approaching the limit, get the actual values with an
+    # additional session run.
+    num_finalized_trees, num_attempted_layers = run_values.results
+    if (num_finalized_trees >= self._max_trees - 1 or
+        num_attempted_layers > 2 * self._max_trees * self._max_depth - 1):
+      num_finalized_trees, num_attempted_layers = run_context.session.run(
+          [self._num_finalized_trees_tensor, self._num_attempted_layers_tensor])
+    if (num_finalized_trees >= self._max_trees or
+        num_attempted_layers > 2 * self._max_trees * self._max_depth):
+      run_context.request_stop()
+
+
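+# Editor's worked example for the hook above (hypothetical numbers): with
+# max_trees=3 and max_depth=2, training stops once num_finalized_trees >= 3
+# or num_attempted_layers > 2 * 3 * 2 = 12; the re-read via an extra
+# session.run only happens within one tree/layer of those limits.
+
+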
+def _get_max_splits(tree_hparams):
+  """Calculates the max possible number of splits based on tree params."""
+  # Maximum number of splits possible in the whole tree is 2^max_depth - 1.
+  max_splits = (1 << tree_hparams.max_depth) - 1
+  return max_splits
+
+
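+# Editor's worked example (hypothetical hparams): max_depth=6 gives
+# (1 << 6) - 1 = 63 possible splits for the whole tree.
+
+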
+class _EnsembleGrower(object):
+  """Abstract base class for different types of ensemble growers.
+
+  Use it to receive training ops for growing and centering bias, depending
+  on the implementation (for example, in memory or accumulator-based
+  distributed):
+    grower = ...create subclass grower(tree_ensemble, tree_hparams)
+    grow_op = grower.grow_tree(stats_summaries_list, last_layer_nodes_range,
+                               split_types_list)
+    training_ops.append(grow_op)
+  """
+
+  def __init__(self, tree_ensemble, quantile_accumulator, tree_hparams,
+               feature_ids_list, logits_dimension):
+    """Initializes a grower object.
+
+    Args:
+      tree_ensemble: A TreeEnsemble variable.
+      quantile_accumulator: A QuantileAccumulator variable.
+      tree_hparams: TODO. collections.namedtuple for hyper parameters.
+      feature_ids_list: a list of lists of feature ids for each bucket size.
+      logits_dimension: a constant (int) for the dimension of logits.
+
+    Raises:
+      ValueError: when pruning mode is invalid or pruning is used and no tree
+      complexity is set.
+    """
+    self._tree_ensemble = tree_ensemble
+    self._tree_hparams = tree_hparams
+    self._quantile_accumulator = quantile_accumulator
+    self._feature_ids_list = feature_ids_list
+    self._logits_dimension = logits_dimension
+    # pylint: disable=protected-access
+    self._pruning_mode_parsed = boosted_trees_ops.PruningMode.from_str(
+        tree_hparams.pruning_mode)
+
+    if tree_hparams.tree_complexity > 0:
+      if self._pruning_mode_parsed == boosted_trees_ops.PruningMode.NO_PRUNING:
+        raise ValueError(
+            'Tree complexity has no effect unless a pruning mode is chosen.')
+    else:
+      if self._pruning_mode_parsed != boosted_trees_ops.PruningMode.NO_PRUNING:
+        raise ValueError('For pruning, tree_complexity must be positive.')
+    # pylint: enable=protected-access
+
+  @abc.abstractmethod
+  def accumulate_quantiles(self, float_features, weights, are_boundaries_ready):
+    """Accumulate quantile information for float features.
+
+    Args:
+      float_features: float features.
+      weights: weights Tensor.
+      are_boundaries_ready: bool variable.
+
+    Returns:
+      An operation that accumulates quantile statistics.
+    """
+
+  @abc.abstractmethod
+  def center_bias(self, center_bias_var, gradients, hessians):
+    """Centers bias, if ready, based on statistics.
+
+    Args:
+      center_bias_var: A variable that will be updated when bias centering
+        finished.
+      gradients: A rank 2 tensor of gradients.
+      hessians: A rank 2 tensor of hessians.
+
+    Returns:
+      An operation for centering bias.
+    """
+
+  @abc.abstractmethod
+  def grow_tree(self, stats_summaries_list, last_layer_nodes_range,
+                split_types_list):
+    """Grows a tree, if ready, based on provided statistics.
+
+    Args:
+      stats_summaries_list: List of stats summary tensors, representing sums of
+        gradients and hessians for each feature bucket.
+      last_layer_nodes_range: A tensor representing ids of the nodes in the
+        current layer, to be split.
+      split_types_list: a list of lists indicating feature split types.
+
+    Returns:
+      An op for growing a tree.
+    """
+
+  def chief_init_op(self):
+    """Ops that chief needs to run to initialize the state."""
+    return tf.no_op()
+
+  #  ============= Helper methods ===========
+
+  def _center_bias_fn(self, center_bias_var, mean_gradients, mean_hessians):
+    """Updates the ensembles and cache (if needed) with logits prior."""
+    continue_centering = boosted_trees_ops.center_bias(
+        self._tree_ensemble.resource_handle,
+        mean_gradients=mean_gradients,
+        mean_hessians=mean_hessians,
+        l1=self._tree_hparams.l1,
+        l2=self._tree_hparams.l2)
+    return center_bias_var.assign(continue_centering)
+
+  def _grow_tree_from_stats_summaries(self, stats_summaries_list,
+                                      last_layer_nodes_range, split_types_list):
+    """Updates ensemble based on the best gains from stats summaries."""
+    assert (len(stats_summaries_list) == len(self._feature_ids_list) ==
+            len(split_types_list))
+
+    # These lists will be of size num_buckets. Each element will be a list.
+    node_ids_list = []
+    gains_list = []
+    best_feature_ids_list = []
+    best_feature_dimensions_list = []
+    best_feature_splits_list = []
+    thresholds_list = []
+    left_node_contribs_list = []
+    right_node_contribs_list = []
+    for bucket_ix in range(len(self._feature_ids_list)):
+      (node_ids, gains, best_feature_ids, best_feature_dimensions, thresholds,
+       left_node_contribs, right_node_contribs, best_feature_split_types) = (
+           boosted_trees_ops.calculate_best_feature_split_v2(
+               last_layer_nodes_range,
+               stats_summaries_list[bucket_ix],
+               candidate_feature_ids=self._feature_ids_list[bucket_ix],
+               split_types=split_types_list[bucket_ix],
+               l1=self._tree_hparams.l1,
+               l2=self._tree_hparams.l2,
+               tree_complexity=self._tree_hparams.tree_complexity,
+               min_node_weight=self._tree_hparams.min_node_weight,
+               logits_dimension=self._logits_dimension))
+      node_ids_list.append(node_ids)
+      gains_list.append(gains)
+      best_feature_ids_list.append(best_feature_ids)
+      best_feature_dimensions_list.append(best_feature_dimensions)
+      best_feature_splits_list.append(best_feature_split_types)
+      thresholds_list.append(thresholds)
+      left_node_contribs_list.append(left_node_contribs)
+      right_node_contribs_list.append(right_node_contribs)
+    grow_op = boosted_trees_ops.update_ensemble_v2(
+        # Confirm if local_tree_ensemble or tree_ensemble should be used.
+        self._tree_ensemble.resource_handle,
+        feature_ids=best_feature_ids_list,
+        dimension_ids=best_feature_dimensions_list,
+        node_ids=node_ids_list,
+        gains=gains_list,
+        thresholds=thresholds_list,
+        left_node_contribs=left_node_contribs_list,
+        right_node_contribs=right_node_contribs_list,
+        split_types=best_feature_splits_list,
+        learning_rate=self._tree_hparams.learning_rate,
+        max_depth=self._tree_hparams.max_depth,
+        pruning_mode=self._pruning_mode_parsed,
+        logits_dimension=self._logits_dimension)
+    return grow_op
+
+
+class _InMemoryEnsembleGrower(_EnsembleGrower):
+  """An in-memory ensemble grower."""
+
+  def __init__(self, tree_ensemble, quantile_accumulator, tree_hparams,
+               feature_ids_list, logits_dimension):
+
+    super(_InMemoryEnsembleGrower, self).__init__(
+        tree_ensemble=tree_ensemble,
+        quantile_accumulator=quantile_accumulator,
+        tree_hparams=tree_hparams,
+        feature_ids_list=feature_ids_list,
+        logits_dimension=logits_dimension)
+
+  def accumulate_quantiles(self, float_features, weights, are_boundaries_ready):
+    summary_op = self._quantile_accumulator.add_summaries(
+        float_features, weights)
+    with tf.control_dependencies([summary_op]):
+      flush = self._quantile_accumulator.flush()
+      with tf.control_dependencies([flush]):
+        return are_boundaries_ready.assign(True).op
+
+  def center_bias(self, center_bias_var, gradients, hessians):
+    # For in memory, we already have a full batch of gradients and hessians,
+    # so just take a mean and proceed with centering.
+    mean_gradients = tf.compat.v1.expand_dims(
+        tf.math.reduce_mean(gradients, 0), 0)
+    mean_hessians = tf.compat.v1.expand_dims(
+        tf.math.reduce_mean(hessians, 0), 0)
+    return self._center_bias_fn(center_bias_var, mean_gradients, mean_hessians)
+
+  def grow_tree(self, stats_summaries_list, last_layer_nodes_range,
+                split_types_list):
+    # For in memory, we already have full data in one batch, so we can grow the
+    # tree immediately.
+    return self._grow_tree_from_stats_summaries(stats_summaries_list,
+                                                last_layer_nodes_range,
+                                                split_types_list)
+
+
+class _AccumulatorEnsembleGrower(_EnsembleGrower):
+  """An accumulator based ensemble grower."""
+
+  def __init__(self, tree_ensemble, quantile_accumulator, tree_hparams,
+               stamp_token, n_batches_per_layer, bucket_size_list, is_chief,
+               center_bias, feature_ids_list, logits_dimension,
+               feature_dimensions):
+    super(_AccumulatorEnsembleGrower, self).__init__(
+        tree_ensemble=tree_ensemble,
+        quantile_accumulator=quantile_accumulator,
+        tree_hparams=tree_hparams,
+        feature_ids_list=feature_ids_list,
+        logits_dimension=logits_dimension)
+    self._stamp_token = stamp_token
+    self._n_batches_per_layer = n_batches_per_layer
+    self._bucket_size_list = bucket_size_list
+    self._is_chief = is_chief
+    self._feature_dimensions = feature_dimensions
+    self._growing_accumulators = []
+    self._chief_init_ops = []
+    max_splits = _get_max_splits(self._tree_hparams)
+    i = 0
+    # TODO(crawles): Allow to create an accumulator per feature, instead of
+    # accumulator per bucket_size.
+    for bucket_size, feature_ids in zip(self._bucket_size_list,
+                                        self._feature_ids_list):
+      grad_size = logits_dimension
+      hessian_size = logits_dimension * logits_dimension
+      accumulator = _accumulator(
+          dtype=tf.dtypes.float32,
+          # The stats consist of grads and hessians (the last dimension).
+          shape=[
+              len(feature_ids),
+              max_splits,
+              1,  # TODO(crawles): Support multi_dim features.
+              bucket_size + 1,  # +1 for missing/default bucket.
+              grad_size + hessian_size
+          ],
+          shared_name='numeric_stats_summary_accumulator_' + str(i))
+      i += 1
+      self._chief_init_ops.append(
+          accumulator.set_global_step(self._stamp_token))
+      self._growing_accumulators.append(accumulator)
+    self._center_bias = center_bias
+    if center_bias:
+      self._bias_accumulator = _accumulator(
+          dtype=tf.dtypes.float32,
+          # The stats consist of grads and hessians means only.
+          # TODO(nponomareva): this will change for a multiclass
+          shape=[2, 1],
+          shared_name='bias_accumulator')
+      self._chief_init_ops.append(
+          self._bias_accumulator.set_global_step(self._stamp_token))
+
+  def accumulate_quantiles(self, float_features, weights, are_boundaries_ready):
+    summary_op = self._quantile_accumulator.add_summaries(
+        float_features, weights)
+    cond_accum = _accumulator(
+        dtype=tf.dtypes.float32, shape={}, shared_name='quantile_summary_accum')
+    cond_accum_step = cond_accum.set_global_step(self._stamp_token)
+    apply_grad = cond_accum.apply_grad(tf.constant(0.), self._stamp_token)
+    update_quantile_op = tf.group(summary_op, cond_accum_step, apply_grad)
+    if not self._is_chief:
+      return update_quantile_op
+
+    with tf.control_dependencies([update_quantile_op]):
+
+      def flush_fn():
+        grad = cond_accum.take_grad(1)
+        flush_op = self._quantile_accumulator.flush()
+        boundaries_ready_op = are_boundaries_ready.assign(True).op
+        return tf.group(flush_op, grad, boundaries_ready_op)
+
+      finalize_quantile_op = _cond(
+          tf.math.greater_equal(cond_accum.num_accumulated(),
+                                self._n_batches_per_layer),
+          flush_fn,
+          tf.no_op,
+          name='wait_until_quantiles_accumulated')
+    return finalize_quantile_op
+
+  def center_bias(self, center_bias_var, gradients, hessians):
+    # For the non-in-memory case, we need to accumulate enough batches first
+    # before proceeding with bias centering.
+
+    # Create an accumulator.
+    if not self._center_bias:
+      raise RuntimeError('center_bias called but bias centering is disabled.')
+    bias_dependencies = []
+    grads_and_hess = tf.stack([gradients, hessians], axis=0)
+    grads_and_hess = tf.math.reduce_mean(grads_and_hess, axis=1)
+
+    apply_grad = self._bias_accumulator.apply_grad(grads_and_hess,
+                                                   self._stamp_token)
+    bias_dependencies.append(apply_grad)
+
+    # Center bias if enough batches were processed.
+    with tf.control_dependencies(bias_dependencies):
+      if not self._is_chief:
+        return tf.no_op()
+
+      def _set_accumulators_stamp():
+        return tf.group([
+            acc.set_global_step(self._stamp_token + 1)
+            for acc in self._growing_accumulators
+        ])
+
+      def center_bias_from_accumulator():
+        accumulated = tf.unstack(self._bias_accumulator.take_grad(1), axis=0)
+        center_bias_op = self._center_bias_fn(
+            center_bias_var, tf.compat.v1.expand_dims(accumulated[0], 0),
+            tf.compat.v1.expand_dims(accumulated[1], 0))
+        with tf.control_dependencies([center_bias_op]):
+          return _cond(center_bias_var, tf.no_op, _set_accumulators_stamp)
+
+      center_bias_op = _cond(
+          tf.math.greater_equal(self._bias_accumulator.num_accumulated(),
+                                self._n_batches_per_layer),
+          center_bias_from_accumulator,
+          tf.no_op,
+          name='wait_until_n_batches_for_bias_accumulated')
+      return center_bias_op
+
+  def grow_tree(self, stats_summaries_list, last_layer_nodes_range,
+                split_types_list):
+    dependencies = []
+    for i in range(len(self._feature_ids_list)):
+      stats_summaries = stats_summaries_list[i]
+      apply_grad = self._growing_accumulators[i].apply_grad(
+          tf.stack(stats_summaries, axis=0), self._stamp_token)
+      dependencies.append(apply_grad)
+
+    # Grow the tree if enough batches are accumulated.
+    with tf.control_dependencies(dependencies):
+      if not self._is_chief:
+        return tf.no_op()
+
+      min_accumulated = tf.math.reduce_min(
+          tf.stack(
+              [acc.num_accumulated() for acc in self._growing_accumulators]))
+
+      def grow_tree_from_accumulated_summaries_fn():
+        """Updates tree with the best layer from accumulated summaries."""
+        # Take out the accumulated summaries from the accumulator and grow.
+        accumulated_summaries_list = [
+            tf.unstack(accumulator.take_grad(1), axis=0)
+            for accumulator in self._growing_accumulators
+        ]
+        grow_op = self._grow_tree_from_stats_summaries(
+            accumulated_summaries_list, last_layer_nodes_range,
+            split_types_list)
+        return grow_op
+
+      grow_model = _cond(
+          tf.math.greater_equal(min_accumulated, self._n_batches_per_layer),
+          grow_tree_from_accumulated_summaries_fn,
+          tf.no_op,
+          name='wait_until_n_batches_accumulated')
+      return grow_model
+
+  def chief_init_op(self):
+    """Ops that chief needs to run to initialize the state."""
+    return tf.group(self._chief_init_ops)
+
+
+def _bt_model_fn(features,
+                 labels,
+                 mode,
+                 head,
+                 feature_columns,
+                 tree_hparams,
+                 n_batches_per_layer,
+                 config,
+                 closed_form_grad_and_hess_fn=None,
+                 example_id_column_name=None,
+                 weight_column=None,
+                 train_in_memory=False,
+                 name='boosted_trees'):
+  """Gradient Boosted Trees model_fn.
+
+  Args:
+    features: dict of `Tensor`.
+    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of dtype
+      `int32` or `int64` in the range `[0, n_classes)`.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `head_lib._Head` instance.
+    feature_columns: Iterable of `fc_old._FeatureColumn` model inputs.
+    tree_hparams: TODO. collections.namedtuple for hyper parameters.
+    n_batches_per_layer: A `Tensor` of `int64`. Each layer is built after at
+      least n_batches_per_layer accumulations.
+    config: `RunConfig` object to configure the runtime settings.
+    closed_form_grad_and_hess_fn: a function that accepts logits and labels and
+      returns gradients and hessians. By default, they are created by
+      tf.gradients() from the loss.
+    example_id_column_name: Name of the feature for a unique ID per example.
+      Currently experimental -- not exposed to public API.
+    weight_column: A string or a `_NumericColumn` created by
+      `tf.fc_old.numeric_column` defining feature column representing weights.
+      It is used to downweight or boost examples during training. It will be
+      multiplied by the loss of the example. If it is a string, it is used as a
+      key to fetch weight tensor from the `features`. If it is a
+      `_NumericColumn`, raw tensor is fetched by key `weight_column.key`, then
+      weight_column.normalizer_fn is applied on it to get weight tensor.
+    train_in_memory: `bool`, when true, it assumes the dataset is in memory,
+      i.e., input_fn should return the entire dataset as a single batch,
+      n_batches_per_layer should be set as 1, num_worker_replicas should be 1,
+      and num_ps_replicas should be 0 in `tf.estimator.RunConfig`.
+    name: Name to use for the model.
+
+  Returns:
+      An `EstimatorSpec` instance.
+
+  Raises:
+    ValueError: mode or params are invalid, or features has the wrong type.
+  """
+  logits_dimension = head.logits_dimension
+  sorted_feature_columns = sorted(feature_columns, key=lambda tc: tc.name)
+  float_columns = _get_float_feature_columns(sorted_feature_columns)
+
+  with ops.name_scope(name) as name:
+    # Prepare.
+    global_step = tf.compat.v1.train.get_or_create_global_step()
+    # Create Ensemble resources.
+    tree_ensemble = boosted_trees_ops.TreeEnsemble(name=name)
+
+    # Create Quantile accumulator resource.
+    eps = tree_hparams.quantile_sketch_epsilon
+    num_quantiles = int(1. / eps)
+    bucket_boundaries_dict = {}
+    quantile_accumulator = None
+
+    if float_columns:
+      num_float_features = _calculate_num_features(float_columns)
+      quantile_accumulator = boosted_trees_ops.QuantileAccumulator(
+          epsilon=eps,
+          num_streams=num_float_features,
+          num_quantiles=num_quantiles,
+          name=_QUANTILE_ACCUMULATOR_RESOURCE_NAME)
+      bucket_boundaries = quantile_accumulator.get_bucket_boundaries()
+      bucket_boundaries_dict = _get_float_boundaries_dict(
+          float_columns, bucket_boundaries)
+      are_boundaries_ready_initial = False
+    else:
+      are_boundaries_ready_initial = True
+
+    bucket_size_list, feature_ids_list, split_types_list = _group_features_by_num_buckets_and_split_type(
+        sorted_feature_columns, num_quantiles)
+
+    # Create logits.
+    if mode != ModeKeys.TRAIN:
+      input_feature_list = _get_transformed_features(features,
+                                                     sorted_feature_columns,
+                                                     bucket_boundaries_dict)
+      logits = boosted_trees_ops.predict(
+          # For non-TRAIN mode, ensemble doesn't change after initialization,
+          # so no local copy is needed; using tree_ensemble directly.
+          tree_ensemble_handle=tree_ensemble.resource_handle,
+          bucketized_features=input_feature_list,
+          logits_dimension=logits_dimension)
+      return head.create_estimator_spec(
+          features=features,
+          mode=mode,
+          labels=labels,
+          train_op_fn=tf.no_op,
+          logits=logits)
+
+    # ============== Training graph ==============
+    center_bias = tree_hparams.center_bias
+    is_single_machine = (config.num_worker_replicas <= 1)
+
+    if train_in_memory:
+      assert n_batches_per_layer == 1, (
+          'When train_in_memory is enabled, input_fn should return the entire '
+          'dataset as a single batch, and n_batches_per_layer should be set as '
+          '1.')
+      if (not config.is_chief or config.num_worker_replicas > 1 or
+          config.num_ps_replicas > 0):
+        raise ValueError('train_in_memory is supported only for '
+                         'non-distributed training.')
+    worker_device = tf.no_op().device
+    # Extract input features and set up cache for training.
+    training_state_cache = None
+
+    are_boundaries_ready = _variable(
+        initial_value=are_boundaries_ready_initial,
+        name='are_boundaries_ready',
+        trainable=False)
+
+    if train_in_memory:
+      # Cache transformed features as well for in-memory training.
+      batch_size = tf.compat.v1.shape(labels)[0]
+
+      def _split_into_indicator_and_other_columns():
+        indicator_columns = []
+        other_columns = []
+        for fc in sorted_feature_columns:
+          if isinstance(
+              fc,
+              (feature_column_lib.IndicatorColumn, fc_old._IndicatorColumn)):
+            indicator_columns.append(fc)
+          else:
+            other_columns.append(fc)
+        return indicator_columns, other_columns
+
+      # Split columns into indicator and other columns.
+      indicator_columns, other_columns = (
+          _split_into_indicator_and_other_columns())
+
+      input_feature_list, input_cache_op = _cache_transformed_features(
+          features, sorted_feature_columns, indicator_columns, other_columns,
+          batch_size, bucket_boundaries_dict, are_boundaries_ready)
+
+      training_state_cache = _CacheTrainingStatesUsingVariables(
+          batch_size, logits_dimension)
+    else:
+      input_feature_list = _get_transformed_features(features,
+                                                     sorted_feature_columns,
+                                                     bucket_boundaries_dict)
+      if example_id_column_name:
+        example_ids = features[example_id_column_name]
+        training_state_cache = _CacheTrainingStatesUsingHashTable(
+            example_ids, logits_dimension)
+    if training_state_cache:
+      cached_tree_ids, cached_node_ids, cached_logits = (
+          training_state_cache.lookup())
+    else:
+      # Always start from the beginning when no cache is set up.
+      batch_size = tf.compat.v1.shape(labels)[0]
+      cached_tree_ids, cached_node_ids, cached_logits = (
+          tf.zeros([batch_size], dtype=tf.dtypes.int32),
+          _DUMMY_NODE_ID * tf.ones([batch_size], dtype=tf.dtypes.int32),
+          tf.zeros([batch_size, logits_dimension], dtype=tf.dtypes.float32))
+
+    if is_single_machine:
+      local_tree_ensemble = tree_ensemble
+      ensemble_reload = tf.no_op()
+    else:
+      # Have a local copy of ensemble for the distributed setting.
+      with tf.compat.v1.device(worker_device):
+        local_tree_ensemble = boosted_trees_ops.TreeEnsemble(
+            name=name + '_local', is_local=True)
+      # TODO(soroush): Do partial updates if this becomes a bottleneck.
+      ensemble_reload = local_tree_ensemble.deserialize(
+          *tree_ensemble.serialize())
+    with tf.control_dependencies([ensemble_reload]):
+      (stamp_token, num_trees, num_finalized_trees, num_attempted_layers,
+       last_layer_nodes_range) = local_tree_ensemble.get_states()
+      partial_logits, tree_ids, node_ids = boosted_trees_ops.training_predict(
+          tree_ensemble_handle=local_tree_ensemble.resource_handle,
+          cached_tree_ids=cached_tree_ids,
+          cached_node_ids=cached_node_ids,
+          bucketized_features=input_feature_list,
+          logits_dimension=logits_dimension)
+    logits = cached_logits + partial_logits
+    if train_in_memory:
+      grower = _InMemoryEnsembleGrower(tree_ensemble, quantile_accumulator,
+                                       tree_hparams, feature_ids_list,
+                                       logits_dimension)
+    else:
+      feature_dimensions = _get_feature_dimensions(feature_ids_list,
+                                                   input_feature_list)
+      grower = _AccumulatorEnsembleGrower(tree_ensemble, quantile_accumulator,
+                                          tree_hparams, stamp_token,
+                                          n_batches_per_layer, bucket_size_list,
+                                          config.is_chief, center_bias,
+                                          feature_ids_list, logits_dimension,
+                                          feature_dimensions)
+
+    tf.compat.v1.summary.scalar('ensemble/num_trees', num_trees)
+    tf.compat.v1.summary.scalar('ensemble/num_finalized_trees',
+                                num_finalized_trees)
+    tf.compat.v1.summary.scalar('ensemble/num_attempted_layers',
+                                num_attempted_layers)
+
+    # Variable that determines whether bias centering is needed.
+    center_bias_var = _variable(
+        initial_value=center_bias, name='center_bias_needed', trainable=False)
+    if weight_column is None:
+      weights = tf.constant(1., shape=[1])
+    else:
+      if isinstance(weight_column, six.string_types):
+        weight_column = tf.feature_column.numeric_column(
+            key=weight_column, shape=(1,))
+      weights = tf.compat.v1.squeeze(
+          _get_transformed_features(features, [weight_column])[0], axis=1)
+
+    # Create training graph.
+    def _train_op_fn(loss):
+      """Run one training iteration."""
+
+      def _update_quantile_fn():
+        """Accumulates quantiles."""
+        with ops.name_scope('UpdateQuantile'):
+          float_features = _get_transformed_features(features, float_columns)
+          squeezed = [tf.compat.v1.squeeze(f, axis=1) for f in float_features]
+          return grower.accumulate_quantiles(squeezed, weights,
+                                             are_boundaries_ready)
+
+      def _grow_tree_fn():
+        """Grow tree."""
+        grow_op = [input_cache_op] if train_in_memory else []
+        if training_state_cache:
+          # Cache logits only after center_bias is complete,
+          # if it's in progress.
+          def insert_fn():
+            return training_state_cache.insert(tree_ids, node_ids, logits)
+
+          grow_op.append(_cond(center_bias_var, tf.no_op, insert_fn))
+
+        if logits_dimension == 1 and closed_form_grad_and_hess_fn:
+          gradients, hessians = closed_form_grad_and_hess_fn(logits, labels)
+        else:
+          gradients = tf.compat.v1.gradients(loss, logits, name='Gradients')[0]
+          if logits_dimension == 1:
+            hessians = tf.compat.v1.gradients(
+                gradients, logits, name='Hessians')[0]
+          else:
+            # TODO(crawles): support diagonal hessian.
+            hessians = _compute_full_hessian(gradients, logits, logits_dimension)
+        # TODO(youngheek): perhaps storage could be optimized by storing stats
+        # with the dimension max_splits_per_layer, instead of max_splits (for
+        # the entire tree).
+        max_splits = _get_max_splits(tree_hparams)
+        stats_summaries_list = []
+        for num_buckets, feature_ids in zip(bucket_size_list, feature_ids_list):
+          bucket_summaries = []
+          for feature_id in feature_ids:
+            stats_summary = boosted_trees_ops.boosted_trees_aggregate_stats(
+                node_ids=node_ids,
+                gradients=gradients,
+                hessians=hessians,
+                feature=input_feature_list[feature_id],
+                max_splits=max_splits,
+                num_buckets=num_buckets)
+            bucket_summaries.append(stats_summary)
+          stats_summaries_list.append(bucket_summaries)
+        if center_bias:
+          update_model = _cond(
+              center_bias_var,
+              functools.partial(grower.center_bias, center_bias_var, gradients,
+                                hessians),
+              functools.partial(grower.grow_tree, stats_summaries_list,
+                                last_layer_nodes_range, split_types_list))
+        else:
+          update_model = grower.grow_tree(stats_summaries_list,
+                                          last_layer_nodes_range,
+                                          split_types_list)
+        grow_op.append(update_model)
+
+        with tf.control_dependencies([update_model]):
+          increment_global = tf.compat.v1.assign_add(global_step, 1).op
+          grow_op.append(increment_global)
+
+        return tf.group(grow_op, name='grow_op')
+
+      if not float_columns:
+        return _grow_tree_fn()
+      else:
+        return _cond(are_boundaries_ready, _grow_tree_fn, _update_quantile_fn)
+
+  estimator_spec = head.create_estimator_spec(
+      features=features,
+      mode=mode,
+      labels=labels,
+      train_op_fn=_train_op_fn,
+      logits=logits)
+  # Add an early stop hook.
+  estimator_spec = estimator_spec._replace(
+      training_hooks=estimator_spec.training_hooks +
+      (_StopAtAttemptsHook(num_finalized_trees, num_attempted_layers,
+                           tree_hparams.n_trees, tree_hparams.max_depth),),
+      training_chief_hooks=[GrowerInitializationHook(grower.chief_init_op())] +
+      list(estimator_spec.training_chief_hooks))
+  return estimator_spec
+
+
+class GrowerInitializationHook(tf.compat.v1.train.SessionRunHook):
+  """A SessionRunHook handles initialization of `_EnsembleGrower`."""
+
+  def __init__(self, init_op):
+    self._init_op = init_op
+
+  def after_create_session(self, session, coord):
+    session.run(self._init_op)
+
+
+# This is the classical form of maximum entropy loss, which is twice
+# differentiable (the built-in sparse_softmax_cross_entropy is not twice
+# differentiable).
+def per_example_maxent_loss(labels, weights, logits, num_classes, eps=1e-15):
+  """Maximum entropy loss for multiclass problems.
+
+  Maximum entropy is a generalization of logistic loss for the case when more
+  than 2 classes are present.
+
+  Args:
+    labels: Rank 2 (N, 1) or Rank 1 (N) tensor of per-example labels.
+    weights: Rank 2 (N, 1) tensor of per-example weights.
+    logits: Rank 2 (N, K) tensor of per-example predictions, K - num of classes.
+    num_classes: number of classes in classification task. Used to expand label
+      indices into one-hot encodings.
+    eps: tolerance, used as a minimum possible value.
+
+  Returns:
+    loss: A Rank 2 (N, 1) tensor of per-example maxent loss.
+    update_op: An update operation to update the loss's internal state.
+  """
+  labels = tf.cast(labels, tf.dtypes.int64)
+  # If labels are of rank 1, make them rank 2.
+  labels_shape = labels.get_shape()
+  if len(labels_shape) != 2:
+    labels = tf.compat.v1.expand_dims(labels, 1)
+  # Labels are indices of classes, convert them to one hot encodings.
+  target_one_hot = tf.one_hot(indices=labels, depth=num_classes)
+  labels = tf.math.reduce_sum(input_tensor=target_one_hot, axis=[1])
+  labels = tf.cast(labels, tf.dtypes.float32)
+
+  # Calculate softmax probabilities for each class.
+  unnormalized_probs = tf.math.exp(logits)
+  normalizers = tf.math.reduce_sum(unnormalized_probs, 1, keepdims=True)
+  softmax_predictions = tf.math.divide(unnormalized_probs,
+                                       tf.math.add(normalizers, eps))
+
+  # Pull out the probabilities for real label.
+  probs_for_real_class = tf.math.reduce_sum(labels * softmax_predictions, 1)
+
+  # Add handling for values near 0 and 1.
+  zeros = tf.compat.v1.zeros_like(
+      probs_for_real_class, dtype=logits.dtype) + eps
+  one_minus_eps = tf.compat.v1.ones_like(
+      probs_for_real_class, dtype=logits.dtype) - eps
+
+  # Take maximum(eps, pred)
+  cond = (probs_for_real_class >= eps)
+  probs_for_real_class = tf.compat.v1.where(cond, probs_for_real_class, zeros)
+
+  # Take minimum(1-eps, pred)
+  cond = (probs_for_real_class <= 1 - eps)
+  probs_for_real_class = tf.compat.v1.where(cond, probs_for_real_class,
+                                            one_minus_eps)
+
+  unweighted_loss = tf.compat.v1.expand_dims(-tf.math.log(probs_for_real_class),
+                                             1)
+  if weights is None:
+    return unweighted_loss, tf.no_op()
+  else:
+    return unweighted_loss * weights, tf.no_op()
+
+
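+# Editor's usage sketch (hypothetical values, weights omitted): three classes,
+# two examples; the loss is -log(softmax probability of the true class).
+def _example_maxent_loss():
+  labels = tf.constant([[0], [2]], dtype=tf.dtypes.int64)
+  logits = tf.constant([[2.0, 0.5, 0.1], [0.2, 0.3, 3.0]])
+  loss, _ = per_example_maxent_loss(labels, None, logits, num_classes=3)
+  return loss  # shape [2, 1]
+
+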
+def _compute_full_hessian(grads, logits, logits_dimension):
+  """Computes hessians for full-hessian multiclass strategy."""
+  gradients_list = tf.compat.v1.unstack(
+      grads, num=logits_dimension, axis=1)
+  hessian_rows = []
+
+  for row in range(logits_dimension):
+    # If the current row is i and K is the number of classes, each iteration
+    # returns a tensor of size batch_size x K representing, for each example,
+    # dx_i dx_1, dx_i dx_2, ..., dx_i dx_K.
+    hessian_row = tf.compat.v1.gradients(
+        gradients_list[row],
+        logits,
+        name='Hessian_%d' % row,
+        colocate_gradients_with_ops=False,
+        gate_gradients=0,
+        aggregation_method=None)
+
+    # hessian_row has shape [1, batch_size, K]; trim the first dimension to
+    # get batch_size x K.
+    hessian_row = tf.compat.v1.squeeze(tf.compat.v1.unstack(hessian_row), [0])
+    hessian_rows.append(hessian_row)
+  stacked = tf.compat.v1.stack(hessian_rows, axis=1)
+  return tf.compat.v1.reshape(stacked, (-1, logits_dimension**2))
+
+
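+# Editor's note on the shape above: for K = logits_dimension, each example's
+# K x K Hessian is flattened row-major, so the returned tensor has shape
+# [batch_size, K**2].
+
+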
+def _multiclass_head(
+    n_classes,
+    weight_column=None,
+    label_vocabulary=None,
+    loss_reduction=tf.compat.v1.losses.Reduction.SUM_OVER_NONZERO_WEIGHTS):
+  """Core head for multiclass problems."""
+
+  def loss_fn(labels, logits):
+    result = per_example_maxent_loss(
+        labels=labels,
+        logits=logits,
+        weights=weight_column,
+        num_classes=n_classes)
+    return result[0]
+
+  # pylint:disable=protected-access
+  head_fn = head_lib._multi_class_head_with_softmax_cross_entropy_loss(
+      n_classes=n_classes,
+      loss_fn=loss_fn,
+      loss_reduction=loss_reduction,
+      label_vocabulary=label_vocabulary,
+      weight_column=weight_column)
+  # pylint:enable=protected-access
+
+  return head_fn
+
+
+def _create_classification_head(n_classes,
+                                weight_column=None,
+                                label_vocabulary=None):
+  """Creates a classification head. Refer to canned.head for details on args."""
+  if n_classes == 2:
+    # pylint: disable=protected-access
+    return head_lib._binary_logistic_head_with_sigmoid_cross_entropy_loss(
+        weight_column=weight_column,
+        label_vocabulary=label_vocabulary,
+        loss_reduction=tf.compat.v1.losses.Reduction.SUM_OVER_BATCH_SIZE)
+    # pylint: enable=protected-access
+  else:
+    return _multiclass_head(
+        n_classes,
+        weight_column,
+        label_vocabulary=label_vocabulary,
+        loss_reduction=tf.compat.v1.losses.Reduction.SUM_OVER_NONZERO_WEIGHTS)
+
+
+def _create_classification_head_and_closed_form(n_classes, weight_column,
+                                                label_vocabulary):
+  """Creates a head for classifier and the closed form gradients/hessians."""
+  head = _create_classification_head(n_classes, weight_column, label_vocabulary)
+  if (n_classes == 2 and head.logits_dimension == 1 and
+      weight_column is None and label_vocabulary is None):
+    # Use the closed-form gradients/hessians for 2 class.
+    def _grad_and_hess_for_logloss(logits, labels):
+      """A closed form gradient and hessian for logistic loss."""
+      # TODO(youngheek): add weights handling.
+      predictions = tf.math.reciprocal(tf.math.exp(-logits) + 1.0)
+      normalizer = tf.math.reciprocal(
+          tf.cast(tf.compat.v1.size(predictions), tf.dtypes.float32))
+      labels = tf.cast(labels, tf.dtypes.float32)
+      labels = head_lib._check_dense_labels_match_logits_and_reshape(  # pylint: disable=protected-access
+          labels, logits, head.logits_dimension)
+      gradients = (predictions - labels) * normalizer
+      hessians = predictions * (1.0 - predictions) * normalizer
+      return gradients, hessians
+
+    closed_form = _grad_and_hess_for_logloss
+  else:
+    closed_form = None
+  return (head, closed_form)
+
+
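+# Editor's note on the closed form above: with p = sigmoid(z) and logistic
+# loss L = -y*log(p) - (1-y)*log(1-p), dL/dz = p - y and d2L/dz2 = p*(1-p);
+# both are scaled by 1/batch_size via `normalizer` to match the mean loss.
+
+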
+def _create_regression_head(label_dimension, weight_column=None):
+  # pylint: disable=protected-access
+  return head_lib._regression_head(
+      label_dimension=label_dimension,
+      weight_column=weight_column,
+      loss_reduction=tf.compat.v1.losses.Reduction.SUM_OVER_BATCH_SIZE)
+  # pylint: enable=protected-access
+
+
+def _compute_feature_importances_per_tree(tree, num_features):
+  """Computes the importance of each feature in the tree."""
+  importances = np.zeros(num_features)
+
+  for node in tree.nodes:
+    node_type = node.WhichOneof('node')
+    # TODO(crawles): support for categorical features.
+    if node_type == 'bucketized_split':
+      feature_id = node.bucketized_split.feature_id
+      importances[feature_id] += node.metadata.gain
+    elif node_type == 'leaf':
+      assert node.metadata.gain == 0
+    else:
+      raise ValueError('Unexpected split type %s' % node_type)
+
+  return importances
+
+
+def _compute_feature_importances(tree_ensemble, num_features, normalize):
+  """Computes gain-based feature importances.
+
+  The higher the value, the more important the feature.
+
+  Args:
+    tree_ensemble: a trained tree ensemble, instance of proto
+      boosted_trees.TreeEnsemble.
+    num_features: The total number of feature ids.
+    normalize: If True, normalize the feature importances.
+
+  Returns:
+    feature_importances: A list of corresponding feature importances indexed by
+    the original feature ids.
+
+  Raises:
+    AssertionError: When normalize = True, if feature importances
+      contain negative value, or if normalization is not possible
+      (e.g. ensemble is empty or trees contain only a root node).
+  """
+  tree_importances = [
+      _compute_feature_importances_per_tree(tree, num_features)
+      for tree in tree_ensemble.trees
+  ]
+  tree_importances = np.array(tree_importances)
+  tree_weights = np.array(tree_ensemble.tree_weights).reshape(-1, 1)
+  feature_importances = np.sum(tree_importances * tree_weights, axis=0)
+  if normalize:
+    assert np.all(feature_importances >= 0), ('feature_importances '
+                                              'must be non-negative.')
+    normalizer = np.sum(feature_importances)
+    assert normalizer > 0, 'Trees are all empty or contain only a root node.'
+    feature_importances /= normalizer
+
+  return feature_importances
+
+
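+# Editor's worked example (hypothetical values): two trees with weights
+# [0.5, 1.0] and per-tree gain sums [2.0, 0.0] and [0.0, 1.0] give
+# importances 0.5*[2, 0] + 1.0*[0, 1] = [1.0, 1.0]; with normalize=True
+# this becomes [0.5, 0.5].
+
+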
+def _bt_explanations_fn(features,
+                        head,
+                        sorted_feature_columns,
+                        quantile_sketch_epsilon,
+                        name='boosted_trees'):
+  """Gradient Boosted Trees predict with explanations model_fn.
+
+  Args:
+    features: dict of `Tensor`.
+    head: A `head_lib._Head` instance.
+    sorted_feature_columns: Sorted iterable of `fc_old._FeatureColumn` model
+      inputs.
+    quantile_sketch_epsilon: float between 0 and 1. Error bound for quantile
+      computation. This is only used for float feature columns, and the number
+      of buckets generated per float feature is 1/quantile_sketch_epsilon.
+    name: Name used for the model.
+
+  Returns:
+      An `EstimatorSpec` instance.
+
+  Raises:
+    ValueError: mode or params are invalid, or features has the wrong type.
+  """
+  mode = ModeKeys.PREDICT
+  with ops.name_scope(name) as name:
+    # Create Ensemble resources.
+    tree_ensemble = boosted_trees_ops.TreeEnsemble(name=name)
+
+    # pylint: disable=protected-access
+    float_columns = _get_float_feature_columns(sorted_feature_columns)
+    num_float_features = _calculate_num_features(float_columns)
+    # pylint: enable=protected-access
+    num_quantiles = int(1. / quantile_sketch_epsilon)
+    if not num_float_features:
+      input_feature_list = _get_transformed_features(features,
+                                                     sorted_feature_columns)
+    # Create Quantile accumulator resource.
+    else:
+      quantile_accumulator = boosted_trees_ops.QuantileAccumulator(
+          epsilon=quantile_sketch_epsilon,
+          num_streams=num_float_features,
+          num_quantiles=num_quantiles,
+          name=_QUANTILE_ACCUMULATOR_RESOURCE_NAME)
+      bucket_boundaries = quantile_accumulator.get_bucket_boundaries()
+      bucket_boundaries_dict = _get_float_boundaries_dict(
+          float_columns, bucket_boundaries)
+      input_feature_list = _get_transformed_features(features,
+                                                     sorted_feature_columns,
+                                                     bucket_boundaries_dict)
+    logits = boosted_trees_ops.predict(
+        # For non-TRAIN mode, ensemble doesn't change after initialization,
+        # so no local copy is needed; using tree_ensemble directly.
+        tree_ensemble_handle=tree_ensemble.resource_handle,
+        bucketized_features=input_feature_list,
+        logits_dimension=head.logits_dimension)
+
+    estimator_spec = head.create_estimator_spec(
+        features=features,
+        mode=mode,
+        labels=None,
+        train_op_fn=tf.no_op,
+        logits=logits)
+
+    debug_op = boosted_trees_ops.example_debug_outputs(
+        tree_ensemble.resource_handle,
+        bucketized_features=input_feature_list,
+        logits_dimension=head.logits_dimension)
+    estimator_spec.predictions[boosted_trees_utils._DEBUG_PROTO_KEY] = debug_op  # pylint: disable=protected-access
+    return estimator_spec
+
+
+def _get_float_boundaries_dict(float_columns, bucket_boundaries):
+  """Create a dict where key is column name, value is bucket boundaries."""
+  bucket_boundaries_dict = {}
+  feature_idx = 0
+  for column in float_columns:
+    num_column_dimensions = _get_variable_shape(
+        column)[0] if _get_variable_shape(column).as_list() else 1
+    bucket_boundaries_dict[
+        column.name] = bucket_boundaries[feature_idx:feature_idx +
+                                         num_column_dimensions]
+    feature_idx += num_column_dimensions
+  return bucket_boundaries_dict
+
+
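+# Editor's sketch (hypothetical columns): for float_columns [a, b] where `a`
+# has shape (2,) and `b` is scalar, `a` receives bucket_boundaries[0:2] and
+# `b` receives bucket_boundaries[2:3].
+
+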
+class _BoostedTreesBase(estimator.Estimator):
+  """Base class for boosted trees estimators.
+
+  This class is intended to keep tree-specific functions (e.g., methods for
+  feature importances and directional feature contributions) in one central
+  place.
+
+  It is not a valid (working) Estimator on its own and should only be used as a
+  base class.
+  """
+
+  def __init__(self, model_fn, model_dir, config, feature_columns, head,
+               center_bias, is_classification, quantile_sketch_epsilon):
+    """Initializes a `_BoostedTreesBase` instance.
+
+    Args:
+      model_fn: Model function. See base class for more detail.
+      model_dir: Directory to save model parameters, graph, etc. See base
+        class for more detail.
+      config: `estimator.RunConfig` configuration object.
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `FeatureColumn`.
+      head: A `head_lib._Head` instance.
+      center_bias: Whether bias centering needs to occur. Bias centering refers
+        to the first node in the very first tree returning the prediction that
+        is aligned with the original labels distribution. For example, for
+        regression problems, the first node will return the mean of the labels.
+        For binary classification problems, it will return a logit for a prior
+        probability of label 1.
+      is_classification: If the estimator is for classification.
+      quantile_sketch_epsilon: float between 0 and 1. Error bound for quantile
+        computation. This is only used for float feature columns, and the number
+        of buckets generated per float feature is 1/quantile_sketch_epsilon.
+    """
+    # We need it so the global step is also a resource var.
+    tf.compat.v1.enable_resource_variables()
+
+    super(_BoostedTreesBase, self).__init__(
+        model_fn=model_fn, model_dir=model_dir, config=config)
+    self._sorted_feature_columns = sorted(
+        feature_columns, key=lambda tc: tc.name)
+    self._head = head
+    self._n_features = _calculate_num_features(self._sorted_feature_columns)
+    self._feature_col_names = _generate_feature_col_name_mapping(
+        self._sorted_feature_columns)
+    self._center_bias = center_bias
+    self._is_classification = is_classification
+    self._quantile_sketch_epsilon = quantile_sketch_epsilon
+
+  def experimental_feature_importances(self, normalize=False):
+    """Computes gain-based feature importances.
+
+    The higher the value, the more important the corresponding feature.
+
+    Args:
+      normalize: If True, normalize the feature importances.
+
+    Returns:
+      feature_importances: an OrderedDict, where the keys are the feature column
+      names and the values are importances. It is sorted by importance.
+
+    Raises:
+      ValueError: When attempting to normalize on an empty ensemble
+        or an ensemble of trees which have no splits. Or when attempting
+        to normalize and feature importances have negative values.
+    """
+    reader = tf.train.load_checkpoint(self._model_dir)
+    serialized = reader.get_tensor('boosted_trees:0_serialized')
+    if not serialized:
+      raise ValueError('Found empty serialized string for TreeEnsemble. '
+                       'You should only call this method after training.')
+    ensemble_proto = boosted_trees_pb2.TreeEnsemble()
+    ensemble_proto.ParseFromString(serialized)
+
+    importances = _compute_feature_importances(ensemble_proto, self._n_features,
+                                               normalize)
+    # pylint:disable=protected-access
+    return boosted_trees_utils._sum_by_feature_col_name_and_sort(
+        self._feature_col_names, importances)
+    # pylint:enable=protected-access
+
+  def experimental_predict_with_explanations(self,
+                                             input_fn,
+                                             predict_keys=None,
+                                             hooks=None,
+                                             checkpoint_path=None):
+    """Computes model explainability outputs per example along with predictions.
+
+    Currently supports directional feature contributions (DFCs). For each
+    instance, DFCs indicate the aggregate contribution of each feature. See
+    https://arxiv.org/abs/1312.1121 and
+    http://blog.datadive.net/interpreting-random-forests/ for more details.
+
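+    Example (an illustrative sketch; `input_fn_predict` is assumed to be
+    defined elsewhere and the estimator trained with `center_bias=True`):
+
+    ```python
+    for pred in est.experimental_predict_with_explanations(input_fn_predict):
+      # 'dfc' is an OrderedDict of per-feature contributions; 'bias' is the
+      # prior prediction from the training data distribution.
+      print(pred['bias'], pred['dfc'])
+    ```
+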
+    Args:
+      input_fn: A function that provides input data for predicting as
+        minibatches. See [Premade Estimators](
+        https://tensorflow.org/guide/premade_estimators#create_input_functions)
+        for more information. The function should construct and return one of
+        the following:
+        * A `tf.data.Dataset` object: Outputs of `Dataset` object must be a
+          tuple `(features, labels)` with same constraints as below.
+        * A tuple `(features, labels)`: Where `features` is a `tf.Tensor` or a
+          dictionary of string feature name to `Tensor` and `labels` is a
+          `Tensor` or a dictionary of string label name to `Tensor`. Both
+          `features` and `labels` are consumed by `model_fn`. They should
+          satisfy the expectation of `model_fn` from inputs.
+      predict_keys: list of `str`, name of the keys to predict. It is used if
+        the `tf.estimator.EstimatorSpec.predictions` is a `dict`. If
+        `predict_keys` is used, then the rest of the predictions will be
+        filtered from the dictionary, except for 'bias' and 'dfc', which will
+        always be in the dictionary. If `None`, returns all keys in prediction
+        dict, as well as two new keys 'dfc' and 'bias'.
+      hooks: List of `tf.train.SessionRunHook` subclass instances. Used for
+        callbacks inside the prediction call.
+      checkpoint_path: Path of a specific checkpoint to predict. If `None`, the
+        latest checkpoint in `model_dir` is used.  If there are no checkpoints
+        in `model_dir`, prediction is run with newly initialized `Variables`
+        instead of ones restored from checkpoint.
+
+    Yields:
+      Evaluated values of `predictions` tensors. The `predictions` tensors will
+      contain at least two keys 'dfc' and 'bias' for model explanations. The
+      `dfc` value corresponds to the contribution of each feature to the overall
+      prediction for this instance (positive indicating that the feature makes
+      it more likely to select class 1 and negative less likely). The `dfc` is
+      an OrderedDict, where the keys are the feature column names and the values
+      are the contributions. It is sorted by the absolute value of the
+      contribution (e.g. OrderedDict([('age', -0.54), ('gender', 0.4), ('fare',
+      0.21)])). The 'bias' value will be the same across all the instances,
+      corresponding to the probability (classification) or prediction
+      (regression) of the training data distribution.
+
+    Raises:
+      ValueError: when wrong arguments are given or unsupported functionalities
+       are requested.
+    """
+    if not self._center_bias:
+      raise ValueError('center_bias must be enabled during estimator '
+                       'instantiation when using '
+                       'experimental_predict_with_explanations.')
+    elif self._head.logits_dimension > 1:
+      raise ValueError('experimental_predict_with_explanations does not yet '
+                       'support multi-class classification and multi-label '
+                       'regression.')
+    # pylint: disable=protected-access
+    if not self._is_classification:
+      identity_inverse_link_fn = self._head._inverse_link_fn in (None,
+                                                                 tf_identity)
+      # pylint:enable=protected-access
+      if not identity_inverse_link_fn:
+        raise ValueError(
+            'For now only identity inverse_link_fn in regression_head is '
+            'supported for experimental_predict_with_explanations.')
+
+    # pylint:disable=unused-argument
+    def new_model_fn(features, labels, mode):
+      return _bt_explanations_fn(features, self._head,
+                                 self._sorted_feature_columns,
+                                 self._quantile_sketch_epsilon)
+
+    # pylint:enable=unused-argument
+    est = estimator.Estimator(
+        model_fn=new_model_fn,
+        model_dir=self.model_dir,
+        config=self.config,
+        warm_start_from=self._warm_start_settings)
+    # Make sure bias and dfc will be in prediction dict.
+    user_supplied_predict_keys = predict_keys is not None
+    if user_supplied_predict_keys:
+      predict_keys = set(predict_keys)
+      predict_keys.add(boosted_trees_utils._DEBUG_PROTO_KEY)
+    predictions = est.predict(
+        input_fn,
+        predict_keys=predict_keys,
+        hooks=hooks,
+        checkpoint_path=checkpoint_path,
+        yield_single_examples=True)
+    for pred in predictions:
+      bias, dfcs = boosted_trees_utils._parse_explanations_from_prediction(
+          pred[boosted_trees_utils._DEBUG_PROTO_KEY], self._feature_col_names,
+          self._is_classification)
+      pred['bias'] = bias
+      pred['dfc'] = dfcs
+      # Don't need to expose serialized proto to end user.
+      del pred[boosted_trees_utils._DEBUG_PROTO_KEY]
+      yield pred
+
+
+def _validate_input_params(tree_params):
+  """Validate input parameters."""
+  positive = ('n_trees', 'max_depth', 'learning_rate',
+              'quantile_sketch_epsilon')
+  for p in positive:
+    if getattr(tree_params, p) <= 0:
+      raise ValueError('Expected {} > 0, received: {}'.format(
+          p, getattr(tree_params, p)))
+  non_negative = ('l1', 'l2', 'tree_complexity', 'min_node_weight')
+  for p in non_negative:
+    if getattr(tree_params, p) < 0:
+      raise ValueError('Expected {} >= 0, received: {}'.format(
+          p, getattr(tree_params, p)))
+
+
+# pylint: disable=protected-access
+@estimator_export('estimator.BoostedTreesClassifier')
+class BoostedTreesClassifier(_BoostedTreesBase):
+  """A Classifier for Tensorflow Boosted Trees models.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               feature_columns,
+               n_batches_per_layer,
+               model_dir=None,
+               n_classes=2,
+               weight_column=None,
+               label_vocabulary=None,
+               n_trees=100,
+               max_depth=6,
+               learning_rate=0.1,
+               l1_regularization=0.,
+               l2_regularization=0.,
+               tree_complexity=0.,
+               min_node_weight=0.,
+               config=None,
+               center_bias=False,
+               pruning_mode='none',
+               quantile_sketch_epsilon=0.01,
+               train_in_memory=False):
+    """Initializes a `BoostedTreesClassifier` instance.
+
+    Example:
+
+    ```python
+    bucketized_feature_1 = bucketized_column(
+      numeric_column('feature_1'), BUCKET_BOUNDARIES_1)
+    bucketized_feature_2 = bucketized_column(
+      numeric_column('feature_2'), BUCKET_BOUNDARIES_2)
+
+    # We need to see a large portion of the data before we can build a layer,
+    # for example half of the data:
+    # n_batches_per_layer = 0.5 * NUM_EXAMPLES / BATCH_SIZE.
+    # Also note that it is usually beneficial to set some regularization, for
+    # example l2; a good default value is 1./(number of examples per layer).
+    classifier = estimator.BoostedTreesClassifier(
+        feature_columns=[bucketized_feature_1, bucketized_feature_2],
+        n_batches_per_layer=n_batches_per_layer,
+        n_trees=100,
+        l2_regularization=1. / (n_batches_per_layer * batch_size),
+        ... <some other params>
+    )
+
+    def input_fn_train():
+      ...
+      return dataset
+
+    classifier.train(input_fn=input_fn_train)
+
+    def input_fn_eval():
+      ...
+      return dataset
+
+    metrics = classifier.evaluate(input_fn=input_fn_eval)
+
+    # When train_in_memory=True, make sure the input_fn is not batched:
+    def input_fn_train():
+      return tf.data.Dataset.zip(
+        (tf.data.Dataset.from_tensors({'f1': f1_array, ...}),
+         tf.data.Dataset.from_tensors(label_array)))
+    ```
+
+    Args:
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `FeatureColumn`.
+      n_batches_per_layer: the number of batches to collect statistics per
+        layer. The total number of batches is the total number of examples
+        divided by the batch size.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator to
+        continue training a previously saved model.
+      n_classes: number of label classes. Default is binary classification.
+      weight_column: A string or a `NumericColumn` created by
+        `tf.fc_old.numeric_column` defining feature column representing weights.
+        It is used to downweight or boost examples during training. It will be
+        multiplied by the loss of the example. If it is a string, it is used as
+        a key to fetch weight tensor from the `features`. If it is a
+        `NumericColumn`, raw tensor is fetched by key `weight_column.key`, then
+        `weight_column.normalizer_fn` is applied on it to get weight tensor.
+      label_vocabulary: A list of strings representing possible label values. If
+        given, labels must be string type and have any value in
+        `label_vocabulary`. If it is not given, that means labels are already
+        encoded as integer or float within `[0, 1]` for `n_classes=2` and
+        encoded as integer values in {0, 1,..., n_classes-1} for `n_classes>2`.
+        Also, an error will be raised if the vocabulary is not provided and
+        the labels are strings.
+      n_trees: number of trees to be created.
+      max_depth: maximum depth of the tree to grow.
+      learning_rate: shrinkage parameter to be used when a tree is added to the
+        model.
+      l1_regularization: regularization multiplier applied to the absolute
+        weights of the tree leaves. This is a per-instance value. A good
+        default is 1./(n_batches_per_layer*batch_size).
+      l2_regularization: regularization multiplier applied to the squared
+        weights of the tree leaves. This is a per-instance value. A good
+        default is 1./(n_batches_per_layer*batch_size).
+      tree_complexity: regularization factor to penalize trees with more leaves.
+        This is a per-instance value. A good default is
+        1./(n_batches_per_layer*batch_size).
+      min_node_weight: minimum hessian a node must have for a split to be
+        considered. This is a per-instance value. The value will be
+        compared with `sum(leaf_hessian)/(batch_size * n_batches_per_layer)`.
+      config: `RunConfig` object to configure the runtime settings.
+      center_bias: Whether bias centering needs to occur. Bias centering refers
+        to the first node in the very first tree returning the prediction that
+        is aligned with the original labels distribution. For example, for
+        regression problems, the first node will return the mean of the labels.
+        For binary classification problems, it will return a logit for a prior
+        probability of label 1.
+      pruning_mode: one of `none`, `pre`, `post` to indicate no pruning, pre-
+        pruning (do not split a node if not enough gain is observed) and post
+        pruning (build the tree up to a max depth and then prune branches with
+        negative gain). For pre and post pruning, you MUST provide
+        `tree_complexity > 0`.
+      quantile_sketch_epsilon: float between 0 and 1. Error bound for quantile
+        computation. This is only used for float feature columns, and the
+        number of buckets generated per float feature is
+        `1/quantile_sketch_epsilon` (e.g. the default of 0.01 yields roughly
+        100 buckets per feature).
+      train_in_memory: `bool`, when true, it assumes the dataset is in memory,
+        i.e., `input_fn` should return the entire dataset as a single batch,
+        `n_batches_per_layer` should be set as 1, `num_worker_replicas` should
+        be 1, and `num_ps_replicas` should be 0 in `tf.estimator.RunConfig`.
+
+    Raises:
+      ValueError: when wrong arguments are given or unsupported functionalities
+         are requested.
+    """
+    if n_classes > 2 and center_bias:
+      raise ValueError('center_bias is not yet supported with n_classes > 2.')
+    head, closed_form = _create_classification_head_and_closed_form(
+        n_classes, weight_column, label_vocabulary=label_vocabulary)
+    # HParams for the model.
+    tree_hparams = _TreeHParams(n_trees, max_depth, learning_rate,
+                                l1_regularization, l2_regularization,
+                                tree_complexity, min_node_weight, center_bias,
+                                pruning_mode, quantile_sketch_epsilon)
+    _validate_input_params(tree_hparams)
+
+    def _model_fn(features, labels, mode, config):
+      return _bt_model_fn(
+          features,
+          labels,
+          mode,
+          head,
+          feature_columns,
+          tree_hparams,
+          n_batches_per_layer,
+          config,
+          closed_form_grad_and_hess_fn=closed_form,
+          weight_column=weight_column,
+          train_in_memory=train_in_memory)
+
+    super(BoostedTreesClassifier, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        feature_columns=feature_columns,
+        head=head,
+        center_bias=center_bias,
+        is_classification=True,
+        quantile_sketch_epsilon=quantile_sketch_epsilon)
+
+
+@estimator_export('estimator.BoostedTreesRegressor')
+class BoostedTreesRegressor(_BoostedTreesBase):
+  """A Regressor for Tensorflow Boosted Trees models.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               feature_columns,
+               n_batches_per_layer,
+               model_dir=None,
+               label_dimension=1,
+               weight_column=None,
+               n_trees=100,
+               max_depth=6,
+               learning_rate=0.1,
+               l1_regularization=0.,
+               l2_regularization=0.,
+               tree_complexity=0.,
+               min_node_weight=0.,
+               config=None,
+               center_bias=False,
+               pruning_mode='none',
+               quantile_sketch_epsilon=0.01,
+               train_in_memory=False):
+    """Initializes a `BoostedTreesRegressor` instance.
+
+    Example:
+
+    ```python
+    bucketized_feature_1 = bucketized_column(
+      numeric_column('feature_1'), BUCKET_BOUNDARIES_1)
+    bucketized_feature_2 = bucketized_column(
+      numeric_column('feature_2'), BUCKET_BOUNDARIES_2)
+
+    # We need to see a large portion of the data before we can build a layer,
+    # for example half of the data:
+    # n_batches_per_layer = 0.5 * NUM_EXAMPLES / BATCH_SIZE.
+    regressor = estimator.BoostedTreesRegressor(
+        feature_columns=[bucketized_feature_1, bucketized_feature_2],
+        n_batches_per_layer=n_batches_per_layer,
+        n_trees=100,
+        ... <some other params>
+    )
+
+    def input_fn_train():
+      ...
+      return dataset
+
+    regressor.train(input_fn=input_fn_train)
+
+    def input_fn_eval():
+      ...
+      return dataset
+
+    metrics = regressor.evaluate(input_fn=input_fn_eval)
+    ```
+
+    Args:
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `FeatureColumn`.
+      n_batches_per_layer: the number of batches to collect statistics per
+        layer. The total number of batches is the total number of examples
+        divided by the batch size.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator to
+        continue training a previously saved model.
+      label_dimension: Number of regression targets per example.
+      weight_column: A string or a `NumericColumn` created by
+        `tf.fc_old.numeric_column` defining feature column representing weights.
+        It is used to downweight or boost examples during training. It will be
+        multiplied by the loss of the example. If it is a string, it is used as
+        a key to fetch weight tensor from the `features`. If it is a
+        `NumericColumn`, raw tensor is fetched by key `weight_column.key`, then
+        `weight_column.normalizer_fn` is applied on it to get weight tensor.
+      n_trees: number of trees to be created.
+      max_depth: maximum depth of the tree to grow.
+      learning_rate: shrinkage parameter to be used when a tree is added to the
+        model.
+      l1_regularization: regularization multiplier applied to the absolute
+        weights of the tree leaves.
+      l2_regularization: regularization multiplier applied to the squared
+        weights of the tree leaves.
+      tree_complexity: regularization factor to penalize trees with more leaves.
+      min_node_weight: minimum hessian a node must have for a split to be
+        considered. The value will be compared with
+        `sum(leaf_hessian)/(batch_size * n_batches_per_layer)`.
+      config: `RunConfig` object to configure the runtime settings.
+      center_bias: Whether bias centering needs to occur. Bias centering refers
+        to the first node in the very first tree returning the prediction that
+        is aligned with the original labels distribution. For example, for
+        regression problems, the first node will return the mean of the labels.
+        For binary classification problems, it will return a logit for a prior
+        probability of label 1.
+      pruning_mode: one of `none`, `pre`, `post` to indicate no pruning, pre-
+        pruning (do not split a node if not enough gain is observed) and post
+        pruning (build the tree up to a max depth and then prune branches with
+        negative gain). For pre and post pruning, you MUST provide
+        `tree_complexity > 0`.
+      quantile_sketch_epsilon: float between 0 and 1. Error bound for quantile
+        computation. This is only used for float feature columns, and the number
+        of buckets generated per float feature is `1/quantile_sketch_epsilon`.
+      train_in_memory: `bool`, when true, it assumes the dataset is in memory,
+        i.e., `input_fn` should return the entire dataset as a single batch,
+        `n_batches_per_layer` should be set as 1, `num_worker_replicas` should
+        be 1, and `num_ps_replicas` should be 0 in `tf.estimator.RunConfig`.
+
+    Raises:
+      ValueError: when wrong arguments are given or unsupported functionalities
+         are requested.
+    """
+    if label_dimension > 1 and center_bias:
+      raise ValueError(
+          'center_bias is not yet supported with label_dimension > 1.')
+    head = _create_regression_head(label_dimension, weight_column)
+
+    # HParams for the model.
+    tree_hparams = _TreeHParams(n_trees, max_depth, learning_rate,
+                                l1_regularization, l2_regularization,
+                                tree_complexity, min_node_weight, center_bias,
+                                pruning_mode, quantile_sketch_epsilon)
+    _validate_input_params(tree_hparams)
+
+    def _model_fn(features, labels, mode, config):
+      return _bt_model_fn(
+          features,
+          labels,
+          mode,
+          head,
+          feature_columns,
+          tree_hparams,
+          n_batches_per_layer,
+          config,
+          weight_column=weight_column,
+          train_in_memory=train_in_memory)
+
+    super(BoostedTreesRegressor, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        feature_columns=feature_columns,
+        head=head,
+        center_bias=center_bias,
+        is_classification=False,
+        quantile_sketch_epsilon=quantile_sketch_epsilon)
+
+
+@estimator_export('estimator.BoostedTreesEstimator')
+class BoostedTreesEstimator(_BoostedTreesBase):  # pylint: disable=protected-access
+  """An Estimator for Tensorflow Boosted Trees models."""
+
+  def __init__(self,
+               feature_columns,
+               n_batches_per_layer,
+               head,
+               model_dir=None,
+               weight_column=None,
+               n_trees=100,
+               max_depth=6,
+               learning_rate=0.1,
+               l1_regularization=0.,
+               l2_regularization=0.,
+               tree_complexity=0.,
+               min_node_weight=0.,
+               config=None,
+               center_bias=False,
+               pruning_mode='none',
+               quantile_sketch_epsilon=0.01):
+    """Initializes a `BoostedTreesEstimator` instance.
+
+    Use this interface if you need to provide a custom loss/head.
+    For example, the following will be equivalent to using
+    `BoostedTreesRegressor`:
+
+    ```python
+    # Create a head with L2 loss
+    from tensorflow_estimator.python.estimator.canned import head_lib
+
+    head = head_lib._regression_head(label_dimension=1)
+    est = boosted_trees.BoostedTreesEstimator(
+        feature_columns=...,
+        n_batches_per_layer=...,
+        head=head,
+        n_trees=...,
+        max_depth=...)
+    ```
+
+    Args:
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `FeatureColumn`.
+      n_batches_per_layer: the number of batches to collect statistics per
+        layer.
+      head: the `Head` instance defined for Estimator.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator to
+        continue training a previously saved model.
+      weight_column: A string or a `_NumericColumn` created by
+        `tf.feature_column.numeric_column` defining feature column representing
+        weights. It is used to downweight or boost examples during training. It
+        will be multiplied by the loss of the example. If it is a string, it is
+        used as a key to fetch weight tensor from the `features`. If it is a
+        `_NumericColumn`, raw tensor is fetched by key `weight_column.key`, then
+        `weight_column.normalizer_fn` is applied on it to get weight tensor.
+      n_trees: number of trees to be created.
+      max_depth: maximum depth of the tree to grow.
+      learning_rate: shrinkage parameter to be used when a tree is added to the
+        model.
+      l1_regularization: regularization multiplier applied to the absolute
+        weights of the tree leaves.
+      l2_regularization: regularization multiplier applied to the squared
+        weights of the tree leaves.
+      tree_complexity: regularization factor to penalize trees with more leaves.
+      min_node_weight: minimum hessian a node must have for a split to be
+        considered. The value will be compared with `sum(leaf_hessian)/
+        (batch_size * n_batches_per_layer)`.
+      config: `RunConfig` object to configure the runtime settings.
+      center_bias: Whether bias centering needs to occur. Bias centering refers
+        to the first node in the very first tree returning the prediction that
+        is aligned with the original labels distribution. For example, for
+        regression problems, the first node will return the mean of the labels.
+        For binary classification problems, it will return a logit for a prior
+        probability of label 1.
+      pruning_mode: one of `none`, `pre`, `post` to indicate no pruning, pre-
+        pruning (do not split a node if not enough gain is observed) and post
+        pruning (build the tree up to a max depth and then prune branches with
+        negative gain). For pre and post pruning, you MUST provide
+        `tree_complexity > 0`.
+      quantile_sketch_epsilon: float between 0 and 1. Error bound for quantile
+        computation. This is only used for float feature columns, and the number
+        of buckets generated per float feature is `1/quantile_sketch_epsilon`.
+
+    Raises:
+      ValueError: when wrong arguments are given or unsupported functionalities
+         are requested.
+    """
+    # HParams for the model.
+    # pylint: disable=protected-access
+    tree_hparams = _TreeHParams(n_trees, max_depth, learning_rate,
+                                l1_regularization, l2_regularization,
+                                tree_complexity, min_node_weight, center_bias,
+                                pruning_mode, quantile_sketch_epsilon)
+    _validate_input_params(tree_hparams)
+
+    def _model_fn(features, labels, mode, config):
+      return _bt_model_fn(
+          features,
+          labels,
+          mode,
+          head,
+          feature_columns,
+          tree_hparams,
+          n_batches_per_layer,
+          config=config)
+
+    def _is_classification_head(head):
+      """Infers if the head is a classification head."""
+      # Check against all classification heads defined in canned/head.py.
+      # This is not a complete list - classification heads defined outside
+      # the head library are not detected.
+      # pylint: disable=protected-access
+      return isinstance(
+          head, (head_lib._BinaryLogisticHeadWithSigmoidCrossEntropyLoss,
+                 head_lib._MultiClassHeadWithSoftmaxCrossEntropyLoss))
+      # pylint: enable=protected-access
+
+    super(BoostedTreesEstimator, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        feature_columns=feature_columns,
+        head=head,
+        center_bias=center_bias,
+        is_classification=_is_classification_head(head),
+        quantile_sketch_epsilon=quantile_sketch_epsilon)
+    # pylint: enable=protected-access
+
+
+def _get_variable_shape(column):
+  """Returns the variable shape of the provided column."""
+  if feature_column_lib.is_feature_column_v2([column]):
+    return column.variable_shape
+  else:
+    return column._variable_shape
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/boosted_trees_utils.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/boosted_trees_utils.py
new file mode 100644
index 00000000..0f931694
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/boosted_trees_utils.py
@@ -0,0 +1,94 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Debug and model explainability logic for boosted trees."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+
+import numpy as np
+
+from tensorflow.core.kernels.boosted_trees import boosted_trees_pb2
+
+# For directional feature contributions.
+_DEBUG_PROTO_KEY = '_serialized_debug_outputs_proto'
+_BIAS_ID = 0
+
+
+def _parse_debug_proto_string(example_proto_serialized):
+  example_debug_outputs = boosted_trees_pb2.DebugOutput()
+  example_debug_outputs.ParseFromString(example_proto_serialized)
+  feature_ids = example_debug_outputs.feature_ids
+  logits_path = example_debug_outputs.logits_path
+  return feature_ids, logits_path
+
+
+def _compute_directional_feature_contributions(example_feature_ids,
+                                               example_logits_paths, activation,
+                                               feature_col_names):
+  """Directional feature contributions and bias, per example."""
+  # Initialize contributions to 0.
+  num_features = len(feature_col_names)
+  # Traverse tree subtracting child prediction from parent prediction and
+  # associating change with feature id used to split.
+  predictions = np.array(activation(example_logits_paths))
+  delta_pred = predictions[_BIAS_ID + 1:] - predictions[:-1]
+  # Group by feature id, then sum delta_pred.
+  contribs = np.bincount(
+      example_feature_ids, weights=delta_pred, minlength=num_features)
+  # Sum contributions per feature column name and sort by magnitude.
+  dfcs = _sum_by_feature_col_name_and_sort(feature_col_names, contribs)
+  return predictions[_BIAS_ID], dfcs
+
+
+def _identity(logits):
+  return logits
+
+
+def _sigmoid(logits):
+  # TODO(crawles): Change to softmax once multiclass support is available.
+  return 1 / (1 + np.exp(-np.array(logits)))
+
+
+def _parse_explanations_from_prediction(serialized_debug_proto,
+                                        feature_col_names,
+                                        classification=False):
+  """Parse serialized explanability proto, compute dfc, and return bias, dfc."""
+  example_feature_ids, example_logits_path = _parse_debug_proto_string(
+      serialized_debug_proto)
+  if classification:
+    activation = _sigmoid
+  else:
+    activation = _identity
+  bias, dfcs = _compute_directional_feature_contributions(
+      example_feature_ids, example_logits_path, activation, feature_col_names)
+  # TODO(crawles): Prediction path and leaf IDs.
+  return bias, dfcs
+
+
+def _sum_by_feature_col_name_and_sort(names, vals):
+  """Group by feature column names, sum values, and sort by absolute value."""
+  sum_by_dict = {}
+  # Group by name and sum.
+  for name, val in zip(names, vals):
+    sum_by_dict[name] = sum_by_dict.setdefault(name, 0) + val
+  # Then sort.
+  sorted_sum_by = sorted(
+      sum_by_dict.items(), key=lambda tup: abs(tup[1]), reverse=True)
+  return collections.OrderedDict(sorted_sum_by)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn.py
new file mode 100644
index 00000000..fb266776
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn.py
@@ -0,0 +1,1229 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Deep Neural Network estimators."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import six
+import tensorflow as tf
+from tensorflow.python.feature_column import feature_column
+from tensorflow.python.feature_column import feature_column_lib
+from tensorflow.python.framework import ops
+from tensorflow.python.keras.engine import training
+from tensorflow.python.keras.layers import core as keras_core
+from tensorflow.python.keras.layers import normalization as keras_norm
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator.canned import head as head_lib
+from tensorflow_estimator.python.estimator.canned import optimizers
+from tensorflow_estimator.python.estimator.head import head_utils
+from tensorflow_estimator.python.estimator.head import regression_head
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+# The default learning rate of 0.05 is a historical artifact of the initial
+# implementation, but seems a reasonable choice.
+_LEARNING_RATE = 0.05
+
+
+def _add_hidden_layer_summary(value, tag):
+  tf.compat.v1.summary.scalar('%s/fraction_of_zero_values' % tag,
+                              tf.math.zero_fraction(value))
+  tf.compat.v1.summary.histogram('%s/activation' % tag, value)
+
+
+@estimator_export(v1=['estimator.experimental.dnn_logit_fn_builder'])
+def dnn_logit_fn_builder(units, hidden_units, feature_columns, activation_fn,
+                         dropout, input_layer_partitioner, batch_norm):
+  """Function builder for a dnn logit_fn.
+
+  Args:
+    units: An int indicating the dimension of the logit layer.  In the MultiHead
+      case, this should be the sum of all component Heads' logit dimensions.
+    hidden_units: Iterable of integer number of hidden units per layer.
+    feature_columns: Iterable of `feature_column._FeatureColumn` model inputs.
+    activation_fn: Activation function applied to each layer.
+    dropout: When not `None`, the probability we will drop out a given
+      coordinate.
+    input_layer_partitioner: Partitioner for input layer.
+    batch_norm: Whether to use batch normalization after each hidden layer.
+
+  Returns:
+    A logit_fn (see below).
+
+  Raises:
+    ValueError: If units is not an int.
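+
+  Example (a minimal sketch, not from the original source; `feature_cols`
+  and `features` are assumed to be defined elsewhere):
+
+  ```python
+  logit_fn = dnn_logit_fn_builder(
+      units=1,
+      hidden_units=[32, 16],
+      feature_columns=feature_cols,
+      activation_fn=tf.nn.relu,
+      dropout=None,
+      input_layer_partitioner=None,
+      batch_norm=False)
+  logits = logit_fn(features=features, mode=ModeKeys.TRAIN)
+  ```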
+  """
+  if not isinstance(units, six.integer_types):
+    raise ValueError('units must be an int.  Given type: {}'.format(
+        type(units)))
+
+  def dnn_logit_fn(features, mode):
+    """Deep Neural Network logit_fn.
+
+    Args:
+      features: This is the first item returned from the `input_fn` passed to
+        `train`, `evaluate`, and `predict`. This should be a single `Tensor` or
+        `dict` of same.
+      mode: Optional. Specifies if this is training, evaluation, or
+        prediction. See `ModeKeys`.
+
+    Returns:
+      A `Tensor` representing the logits, or a list of `Tensor`s representing
+      multiple logits in the MultiHead case.
+    """
+    dnn_model = _DNNModel(
+        units,
+        hidden_units,
+        feature_columns,
+        activation_fn,
+        dropout,
+        input_layer_partitioner,
+        batch_norm,
+        name='dnn')
+    return dnn_model(features, mode)
+
+  return dnn_logit_fn
+
+
+def dnn_logit_fn_builder_v2(units, hidden_units, feature_columns, activation_fn,
+                            dropout, batch_norm):
+  """Function builder for a dnn logit_fn.
+
+  Args:
+    units: An int indicating the dimension of the logit layer.  In the MultiHead
+      case, this should be the sum of all component Heads' logit dimensions.
+    hidden_units: Iterable of integer number of hidden units per layer.
+    feature_columns: Iterable of `feature_column._FeatureColumn` model inputs.
+    activation_fn: Activation function applied to each layer.
+    dropout: When not `None`, the probability we will drop out a given
+      coordinate.
+    batch_norm: Whether to use batch normalization after each hidden layer.
+
+  Returns:
+    A logit_fn (see below).
+
+  Raises:
+    ValueError: If units is not an int.
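+
+  Example (a minimal sketch under the same assumptions as the v1 builder
+  above; note the v2 builder takes no partitioner argument):
+
+  ```python
+  logit_fn = dnn_logit_fn_builder_v2(
+      units=1,
+      hidden_units=[32, 16],
+      feature_columns=feature_cols,
+      activation_fn=tf.nn.relu,
+      dropout=None,
+      batch_norm=False)
+  logits = logit_fn(features=features, mode=ModeKeys.TRAIN)
+  ```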
+  """
+  if not isinstance(units, six.integer_types):
+    raise ValueError('units must be an int.  Given type: {}'.format(
+        type(units)))
+
+  def dnn_logit_fn(features, mode):
+    """Deep Neural Network logit_fn.
+
+    Args:
+      features: This is the first item returned from the `input_fn` passed to
+        `train`, `evaluate`, and `predict`. This should be a single `Tensor` or
+        `dict` of same.
+      mode: Optional. Specifies if this is training, evaluation, or
+        prediction. See `ModeKeys`.
+
+    Returns:
+      A `Tensor` representing the logits, or a list of `Tensor`s representing
+      multiple logits in the MultiHead case.
+    """
+    dnn_model = _DNNModelV2(
+        units,
+        hidden_units,
+        feature_columns,
+        activation_fn,
+        dropout,
+        batch_norm,
+        name='dnn')
+    return dnn_model(features, mode)
+
+  return dnn_logit_fn
+
+
+def _get_previous_name_scope():
+  current_name_scope = ops.get_name_scope()
+  return current_name_scope.rsplit('/', 1)[0] + '/'
+
+
+class _DNNModel(training.Model):
+  """A DNN Model."""
+
+  def __init__(self,
+               units,
+               hidden_units,
+               feature_columns,
+               activation_fn,
+               dropout,
+               input_layer_partitioner,
+               batch_norm,
+               name=None,
+               **kwargs):
+    super(_DNNModel, self).__init__(name=name, **kwargs)
+    if feature_column_lib.is_feature_column_v2(feature_columns):
+      self._input_layer = tf.compat.v1.keras.layers.DenseFeatures(
+          feature_columns=feature_columns, name='input_layer')
+    else:
+      self._input_layer = feature_column.InputLayer(
+          feature_columns=feature_columns,
+          name='input_layer',
+          create_scope_now=False)
+
+    self._add_layer(self._input_layer, 'input_layer')
+
+    self._dropout = dropout
+    self._batch_norm = batch_norm
+
+    self._hidden_layers = []
+    self._dropout_layers = []
+    self._batch_norm_layers = []
+    self._hidden_layer_scope_names = []
+    for layer_id, num_hidden_units in enumerate(hidden_units):
+      with tf.compat.v1.variable_scope('hiddenlayer_%d' %
+                                       layer_id) as hidden_layer_scope:
+        hidden_layer = tf.compat.v1.layers.Dense(
+            units=num_hidden_units,
+            activation=activation_fn,
+            kernel_initializer=tf.compat.v1.glorot_uniform_initializer(),
+            name=hidden_layer_scope,
+            _scope=hidden_layer_scope)
+        self._add_layer(hidden_layer, hidden_layer_scope.name)
+        self._hidden_layer_scope_names.append(hidden_layer_scope.name)
+        self._hidden_layers.append(hidden_layer)
+        if self._dropout is not None:
+          dropout_layer = tf.compat.v1.layers.Dropout(rate=self._dropout)
+          self._add_layer(dropout_layer, dropout_layer.name)
+          self._dropout_layers.append(dropout_layer)
+        if self._batch_norm:
+          batch_norm_layer = tf.compat.v1.layers.BatchNormalization(
+              # The default momentum 0.99 actually crashes on certain
+              # problems, so here we use 0.999, which is the default of
+              # tf.contrib.layers.batch_norm.
+              momentum=0.999,
+              trainable=True,
+              name='batchnorm_%d' % layer_id,
+              _scope='batchnorm_%d' % layer_id)
+          self._add_layer(batch_norm_layer, batch_norm_layer.name)
+          self._batch_norm_layers.append(batch_norm_layer)
+
+    with tf.compat.v1.variable_scope('logits') as logits_scope:
+      self._logits_layer = tf.compat.v1.layers.Dense(
+          units=units,
+          activation=None,
+          kernel_initializer=tf.compat.v1.glorot_uniform_initializer(),
+          name=logits_scope,
+          _scope=logits_scope)
+      self._add_layer(self._logits_layer, logits_scope.name)
+      self._logits_scope_name = logits_scope.name
+    self._input_layer_partitioner = input_layer_partitioner
+
+  def call(self, features, mode):
+    is_training = mode == ModeKeys.TRAIN
+    # The Keras training.Model adds a name_scope with the name of the model
+    # which modifies the constructed graph. Hence we add another name_scope
+    # here which is the one before the training.Model one was applied.
+    # TODO(rohanj): Remove this in TF 2.0 (b/116728605)
+    with ops.name_scope(name=_get_previous_name_scope()):
+      # TODO(rohanj): Remove dependence on variable scope for partitioning.
+      with tf.compat.v1.variable_scope(
+          'input_from_feature_columns',
+          partitioner=self._input_layer_partitioner):
+        try:
+          net = self._input_layer(features, training=is_training)
+        except TypeError:
+          net = self._input_layer(features)
+      for i in range(len(self._hidden_layers)):
+        net = self._hidden_layers[i](net)
+        if self._dropout is not None and is_training:
+          net = self._dropout_layers[i](net, training=True)
+        if self._batch_norm:
+          net = self._batch_norm_layers[i](net, training=is_training)
+        _add_hidden_layer_summary(net, self._hidden_layer_scope_names[i])
+
+      logits = self._logits_layer(net)
+      _add_hidden_layer_summary(logits, self._logits_scope_name)
+      return logits
+
+  def _add_layer(self, layer, layer_name):
+    # "Magic" required for keras.Model classes to track all the variables in
+    # a list of layers.Layer objects.
+    # TODO(ashankar): Figure out API so user code doesn't have to do this.
+    setattr(self, layer_name, layer)
+
+
+def _name_from_scope_name(name):
+  """Returns the name of an op given the name of its scope.
+
+  Args:
+    name: the name of the scope.
+
+  Returns:
+    the name of the op (equal to scope name minus any trailing slash).
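+    For example (illustrative), `'hiddenlayer_0/'` maps to `'hiddenlayer_0'`.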
+  """
+  return name[:-1] if (name and name[-1] == '/') else name
+
+
+class _DNNModelV2(training.Model):
+  """A DNN Model."""
+
+  def __init__(self,
+               units,
+               hidden_units,
+               feature_columns,
+               activation_fn,
+               dropout,
+               batch_norm,
+               name=None,
+               **kwargs):
+    super(_DNNModelV2, self).__init__(name=name, **kwargs)
+    with ops.name_scope(
+        'input_from_feature_columns') as input_feature_column_scope:
+      layer_name = input_feature_column_scope + 'input_layer'
+      if feature_column_lib.is_feature_column_v2(feature_columns):
+        self._input_layer = tf.compat.v2.keras.layers.DenseFeatures(
+            feature_columns=feature_columns, name=layer_name)
+      else:
+        raise ValueError(
+            'Received a feature column from TensorFlow v1, but this is a '
+            'TensorFlow v2 Estimator. Please either use v2 feature columns '
+            '(accessible via tf.feature_column.* in TF 2.x) with this '
+            'Estimator, or switch to a v1 Estimator for use with v1 feature '
+            'columns (accessible via tf.compat.v1.estimator.* and '
+            'tf.compat.v1.feature_column.*, respectively).')
+
+    self._dropout = dropout
+    self._batch_norm = batch_norm
+
+    self._hidden_layers = []
+    self._dropout_layers = []
+    self._batch_norm_layers = []
+    self._hidden_layer_scope_names = []
+    for layer_id, num_hidden_units in enumerate(hidden_units):
+      with ops.name_scope('hiddenlayer_%d' % layer_id) as hidden_layer_scope:
+        # Get scope name without the trailing slash.
+        hidden_shared_name = _name_from_scope_name(hidden_layer_scope)
+        hidden_layer = keras_core.Dense(
+            units=num_hidden_units,
+            activation=activation_fn,
+            kernel_initializer=tf.compat.v1.glorot_uniform_initializer(),
+            name=hidden_shared_name)
+        self._hidden_layer_scope_names.append(hidden_shared_name)
+        self._hidden_layers.append(hidden_layer)
+        if self._dropout is not None:
+          dropout_layer = keras_core.Dropout(rate=self._dropout)
+          self._dropout_layers.append(dropout_layer)
+        if self._batch_norm:
+          batch_norm_name = hidden_shared_name + '/batchnorm_%d' % layer_id
+          # TODO(scottzhu): Change back to use BatchNormalization when the
+          # cleanup is done.
+          batch_norm_layer = keras_norm.BatchNormalizationBase(
+              # The default momentum 0.99 actually crashes on certain
+              # problems, so here we use 0.999, which is the default of
+              # tf.contrib.layers.batch_norm.
+              momentum=0.999,
+              trainable=True,
+              name=batch_norm_name)
+          self._batch_norm_layers.append(batch_norm_layer)
+
+    with ops.name_scope('logits') as logits_scope:
+      logits_shared_name = _name_from_scope_name(logits_scope)
+      self._logits_layer = keras_core.Dense(
+          units=units,
+          activation=None,
+          kernel_initializer=tf.compat.v1.glorot_uniform_initializer(),
+          name=logits_shared_name)
+      self._logits_scope_name = logits_shared_name
+
+  def call(self, features, mode):
+    is_training = mode == ModeKeys.TRAIN
+    try:
+      net = self._input_layer(features, training=is_training)
+    except TypeError:
+      net = self._input_layer(features)
+    for i in range(len(self._hidden_layers)):
+      net = self._hidden_layers[i](net)
+      if self._dropout is not None and is_training:
+        net = self._dropout_layers[i](net, training=True)
+      if self._batch_norm:
+        net = self._batch_norm_layers[i](net, training=is_training)
+      _add_hidden_layer_summary(net, self._hidden_layer_scope_names[i])
+
+    logits = self._logits_layer(net)
+    _add_hidden_layer_summary(logits, self._logits_scope_name)
+    return logits
+
+
+def _validate_features(features):
+  if not isinstance(features, dict):
+    raise ValueError('features should be a dictionary of `Tensor`s. '
+                     'Given type: {}'.format(type(features)))
+
+
+def _get_dnn_estimator_spec(use_tpu, head, features, labels, mode, logits,
+                            optimizer):
+  """Get EstimatorSpec for DNN Model."""
+  if use_tpu:
+    return head._create_tpu_estimator_spec(  # pylint: disable=protected-access
+        features=features,
+        mode=mode,
+        labels=labels,
+        optimizer=optimizer,
+        logits=logits)
+  else:
+    return head.create_estimator_spec(
+        features=features,
+        mode=mode,
+        labels=labels,
+        optimizer=optimizer,
+        logits=logits)
+
+
+def _dnn_model_fn(features,
+                  labels,
+                  mode,
+                  head,
+                  hidden_units,
+                  feature_columns,
+                  optimizer='Adagrad',
+                  activation_fn=tf.nn.relu,
+                  dropout=None,
+                  input_layer_partitioner=None,
+                  config=None,
+                  use_tpu=False,
+                  batch_norm=False):
+  """Deep Neural Net model_fn v1.
+
+  Args:
+    features: dict of `Tensor`.
+    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of dtype
+      `int32` or `int64` in the range `[0, n_classes)`.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `head_lib._Head` instance.
+    hidden_units: Iterable of integer number of hidden units per layer.
+    feature_columns: Iterable of `feature_column._FeatureColumn` model inputs.
+    optimizer: String, `tf.Optimizer` object, or callable that creates the
+      optimizer to use for training. If not specified, will use the Adagrad
+      optimizer with a default learning rate of 0.05.
+    activation_fn: Activation function applied to each layer.
+    dropout: When not `None`, the probability we will drop out a given
+      coordinate.
+    input_layer_partitioner: Partitioner for input layer. Defaults to
+      `min_max_variable_partitioner` with `min_slice_size` 64 << 20.
+    config: `RunConfig` object to configure the runtime settings.
+    use_tpu: Whether to make a DNN model able to run on TPU. Will make function
+      return a `_TPUEstimatorSpec` instance and disable variable partitioning.
+    batch_norm: Whether to use batch normalization after each hidden layer.
+
+  Returns:
+    An `EstimatorSpec` instance.
+
+  Raises:
+    ValueError: If features has the wrong type.
+  """
+
+  optimizer = optimizers.get_optimizer_instance(
+      optimizer, learning_rate=_LEARNING_RATE)
+
+  _validate_features(features)
+
+  num_ps_replicas = config.num_ps_replicas if config else 0
+
+  partitioner = (None if use_tpu else tf.compat.v1.min_max_variable_partitioner(
+      max_partitions=num_ps_replicas))
+  with tf.compat.v1.variable_scope(
+      'dnn', values=tuple(six.itervalues(features)), partitioner=partitioner):
+    input_layer_partitioner = input_layer_partitioner or (
+        None if use_tpu else tf.compat.v1.min_max_variable_partitioner(
+            max_partitions=num_ps_replicas, min_slice_size=64 << 20))
+
+    logit_fn = dnn_logit_fn_builder(
+        units=head.logits_dimension,
+        hidden_units=hidden_units,
+        feature_columns=feature_columns,
+        activation_fn=activation_fn,
+        dropout=dropout,
+        input_layer_partitioner=input_layer_partitioner,
+        batch_norm=batch_norm)
+    logits = logit_fn(features=features, mode=mode)
+
+    return _get_dnn_estimator_spec(use_tpu, head, features, labels, mode,
+                                   logits, optimizer)
+
+
+def _dnn_model_fn_builder_v2(units, hidden_units, feature_columns,
+                             activation_fn, dropout, batch_norm, features,
+                             mode):
+  """Function builder for dnn logits, trainable variables and update ops.
+
+  Args:
+    units: An int indicating the dimension of the logit layer.  In the MultiHead
+      case, this should be the sum of all component Heads' logit dimensions.
+    hidden_units: Iterable of integer number of hidden units per layer.
+    feature_columns: Iterable of `feature_column._FeatureColumn` model inputs.
+    activation_fn: Activation function applied to each layer.
+    dropout: When not `None`, the probability we will drop out a given
+      coordinate.
+    batch_norm: Whether to use batch normalization after each hidden layer.
+    features: This is the first item returned from the `input_fn` passed to
+      `train`, `evaluate`, and `predict`. This should be a single `Tensor` or
+      `dict` of same.
+    mode: Optional. Specifies if this is training, evaluation, or prediction.
+      See `ModeKeys`.
+
+  Returns:
+    A `Tensor` representing the logits, or a list of `Tensor`s representing
+      multiple logits in the MultiHead case.
+    A list of trainable variables.
+    A list of update ops.
+
+  Raises:
+    ValueError: If units is not an int.
+  """
+  if not isinstance(units, six.integer_types):
+    raise ValueError('units must be an int.  Given type: {}'.format(
+        type(units)))
+  dnn_model = _DNNModelV2(
+      units,
+      hidden_units,
+      feature_columns,
+      activation_fn,
+      dropout,
+      batch_norm,
+      name='dnn')
+  logits = dnn_model(features, mode)
+  trainable_variables = dnn_model.trainable_variables
+  update_ops = dnn_model.updates
+
+  return logits, trainable_variables, update_ops
+
+
+def dnn_model_fn_v2(features,
+                    labels,
+                    mode,
+                    head,
+                    hidden_units,
+                    feature_columns,
+                    optimizer='Adagrad',
+                    activation_fn=tf.nn.relu,
+                    dropout=None,
+                    config=None,
+                    use_tpu=False,
+                    batch_norm=False):
+  """Deep Neural Net model_fn v2.
+
+  This function differs from the v1 `_dnn_model_fn` in the way it handles the
+  optimizer when a string optimizer name is passed.
+
+  Args:
+    features: dict of `Tensor`.
+    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of dtype
+      `int32` or `int64` in the range `[0, n_classes)`.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `base_head.Head` instance.
+    hidden_units: Iterable of integer number of hidden units per layer.
+    feature_columns: Iterable of `feature_column._FeatureColumn` model inputs.
+    optimizer: String, `tf.keras.optimizers.Optimizer` object, or callable that
+      creates the optimizer to use for training. If not specified, will use the
+      Adagrad optimizer. If it is a string, the optimizer's default learning
+      rate is used; if the optimizer does not have a default learning rate, a
+      fixed learning rate of 0.05 is used.
+    activation_fn: Activation function applied to each layer.
+    dropout: When not `None`, the probability we will drop out a given
+      coordinate.
+    config: `RunConfig` object to configure the runtime settings.
+    use_tpu: Whether to make a DNN model able to run on TPU. Will make function
+      return a `_TPUEstimatorSpec` instance and disable variable partitioning.
+    batch_norm: Whether to use batch normalization after each hidden layer.
+
+  Returns:
+    An `EstimatorSpec` instance.
+
+  Raises:
+    ValueError: If features has the wrong type.
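+
+  Example (a hedged sketch, not from the original source; it assumes
+  `feature_cols` is a list of v2 feature columns and uses `RegressionHead`
+  from the head library imported above):
+
+  ```python
+  def model_fn(features, labels, mode):
+    return dnn_model_fn_v2(
+        features, labels, mode,
+        head=regression_head.RegressionHead(label_dimension=1),
+        hidden_units=[32, 16],
+        feature_columns=feature_cols)
+
+  est = estimator.EstimatorV2(model_fn=model_fn)
+  ```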
+  """
+  _validate_features(features)
+
+  del config
+
+  logits, trainable_variables, update_ops = _dnn_model_fn_builder_v2(
+      units=head.logits_dimension,
+      hidden_units=hidden_units,
+      feature_columns=feature_columns,
+      activation_fn=activation_fn,
+      dropout=dropout,
+      batch_norm=batch_norm,
+      features=features,
+      mode=mode)
+
+  # In TRAIN mode, create optimizer and assign global_step variable to
+  # optimizer.iterations to make global_step increase correctly, as hooks
+  # rely on the global step as a step counter.
+  if mode == ModeKeys.TRAIN:
+    optimizer = optimizers.get_optimizer_instance_v2(optimizer)
+    optimizer.iterations = tf.compat.v1.train.get_or_create_global_step()
+
+  # Create EstimatorSpec.
+  if use_tpu:
+    estimator_spec_fn = head._create_tpu_estimator_spec  # pylint: disable=protected-access
+  else:
+    estimator_spec_fn = head.create_estimator_spec  # pylint: disable=protected-access
+
+  return estimator_spec_fn(
+      features=features,
+      mode=mode,
+      labels=labels,
+      optimizer=optimizer,
+      logits=logits,
+      trainable_variables=trainable_variables,
+      update_ops=update_ops)
+
+
+@estimator_export('estimator.DNNClassifier', v1=[])
+class DNNClassifierV2(estimator.EstimatorV2):
+  """A classifier for TensorFlow DNN models.
+
+  Example:
+
+  ```python
+  categorical_feature_a = categorical_column_with_hash_bucket(...)
+  categorical_feature_b = categorical_column_with_hash_bucket(...)
+
+  categorical_feature_a_emb = embedding_column(
+      categorical_column=categorical_feature_a, ...)
+  categorical_feature_b_emb = embedding_column(
+      categorical_column=categorical_feature_b, ...)
+
+  estimator = tf.estimator.DNNClassifier(
+      feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb],
+      hidden_units=[1024, 512, 256])
+
+  # Or an estimator using the ProximalAdagradOptimizer with regularization.
+  estimator = tf.estimator.DNNClassifier(
+      feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb],
+      hidden_units=[1024, 512, 256],
+      optimizer=tf.compat.v1.train.ProximalAdagradOptimizer(
+        learning_rate=0.1,
+        l1_regularization_strength=0.001
+      ))
+
+  # Or estimator using an optimizer with a learning rate decay.
+  estimator = tf.estimator.DNNClassifier(
+      feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb],
+      hidden_units=[1024, 512, 256],
+      optimizer=lambda: tf.keras.optimizers.Adam(
+          learning_rate=tf.compat.v1.train.exponential_decay(
+              learning_rate=0.1,
+              global_step=tf.compat.v1.train.get_global_step(),
+              decay_steps=10000,
+              decay_rate=0.96)))
+
+  # Or estimator with warm-starting from a previous checkpoint.
+  estimator = tf.estimator.DNNClassifier(
+      feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb],
+      hidden_units=[1024, 512, 256],
+      warm_start_from="/path/to/checkpoint/dir")
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train)
+  metrics = estimator.evaluate(input_fn=input_fn_eval)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError` (a minimal sketch follows the list):
+
+  * if `weight_column` is not `None`, a feature with `key=weight_column` whose
+    value is a `Tensor`.
+  * for each `column` in `feature_columns`:
+    - if `column` is a `CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
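+  As a minimal sketch (an illustration, not part of the API; the feature
+  name `my_feature` and the in-memory values are hypothetical), an
+  `input_fn` serving a single `DenseColumn` could look like:
+
+  ```python
+  feature_column = tf.feature_column.numeric_column('my_feature')
+  def input_fn_train():
+    # The dict key must match the column name; y holds class indices.
+    return tf.data.Dataset.from_tensor_slices(
+        ({'my_feature': [[1.0], [2.0]]}, [0, 1])).batch(2)
+  ```
+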
+  Loss is calculated by using softmax cross entropy.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(
+      self,
+      hidden_units,
+      feature_columns,
+      model_dir=None,
+      n_classes=2,
+      weight_column=None,
+      label_vocabulary=None,
+      optimizer='Adagrad',
+      activation_fn=tf.nn.relu,
+      dropout=None,
+      config=None,
+      warm_start_from=None,
+      loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+      batch_norm=False,
+  ):
+    """Initializes a `DNNClassifier` instance.
+
+    Args:
+      hidden_units: Iterable of the number of hidden units per layer. All
+        layers are fully connected. Ex. `[64, 32]` means the first layer has
+        64 nodes and the second one has 32.
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `_FeatureColumn`.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      n_classes: Number of label classes. Defaults to 2, namely binary
+        classification. Must be > 1.
+      weight_column: A string or a `NumericColumn` created by
+        `tf.feature_column.numeric_column` defining the feature column that
+        represents weights. It is used to down-weight or boost examples
+        during training, and is multiplied by the loss of the example. If it
+        is a string, it is used as a key to fetch the weight tensor from the
+        `features`. If it is a `NumericColumn`, the raw tensor is fetched by
+        key `weight_column.key`, then `weight_column.normalizer_fn` is
+        applied to it to get the weight tensor.
+      label_vocabulary: A list of strings representing possible label values.
+        If given, labels must be of string type and take values in
+        `label_vocabulary`. If it is not given, labels are assumed to be
+        already encoded as integer or float within [0, 1] for `n_classes=2`,
+        and as integer values in {0, 1,..., n_classes-1} for `n_classes` > 2.
+        An error is raised if the vocabulary is not provided and the labels
+        are strings.
+      optimizer: An instance of `tf.keras.optimizers.*` used to train the
+        model. Can also be a string (one of 'Adagrad', 'Adam', 'Ftrl',
+        'RMSProp', 'SGD'), or a callable. Defaults to the Adagrad optimizer.
+      activation_fn: Activation function applied to each layer. If `None`, will
+        use `tf.nn.relu`.
+      dropout: When not `None`, the probability we will drop out a given
+        coordinate.
+      config: `RunConfig` object to configure the runtime settings.
+      warm_start_from: A string filepath to a checkpoint to warm-start from, or
+        a `WarmStartSettings` object to fully configure warm-starting.  If the
+        string filepath is provided instead of a `WarmStartSettings`, then all
+        weights are warm-started, and it is assumed that vocabularies and Tensor
+        names are unchanged.
+      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+        to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+      batch_norm: Whether to use batch normalization after each hidden layer.
+    """
+    head = head_utils.binary_or_multi_class_head(
+        n_classes,
+        weight_column=weight_column,
+        label_vocabulary=label_vocabulary,
+        loss_reduction=loss_reduction)
+    estimator._canned_estimator_api_gauge.get_cell('Classifier').set('DNN')
+
+    def _model_fn(features, labels, mode, config):
+      """Call the defined shared dnn_model_fn_v2."""
+      return dnn_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          hidden_units=hidden_units,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          activation_fn=activation_fn,
+          dropout=dropout,
+          config=config,
+          batch_norm=batch_norm)
+
+    super(DNNClassifierV2, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
+
+
+@estimator_export(v1=['estimator.DNNClassifier'])  # pylint: disable=missing-docstring
+class DNNClassifier(estimator.Estimator):
+  __doc__ = DNNClassifierV2.__doc__.replace('SUM_OVER_BATCH_SIZE', 'SUM')
+
+  def __init__(
+      self,
+      hidden_units,
+      feature_columns,
+      model_dir=None,
+      n_classes=2,
+      weight_column=None,
+      label_vocabulary=None,
+      optimizer='Adagrad',
+      activation_fn=tf.nn.relu,
+      dropout=None,
+      input_layer_partitioner=None,
+      config=None,
+      warm_start_from=None,
+      loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+      batch_norm=False,
+  ):
+    head = head_lib._binary_logistic_or_multi_class_head(  # pylint: disable=protected-access
+        n_classes, weight_column, label_vocabulary, loss_reduction)
+    estimator._canned_estimator_api_gauge.get_cell('Classifier').set('DNN')
+
+    def _model_fn(features, labels, mode, config):
+      """Call the defined shared dnn_model_fn."""
+      return _dnn_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          hidden_units=hidden_units,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          activation_fn=activation_fn,
+          dropout=dropout,
+          input_layer_partitioner=input_layer_partitioner,
+          config=config,
+          batch_norm=batch_norm)
+
+    super(DNNClassifier, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
+
+
+@estimator_export('estimator.DNNEstimator', v1=[])
+class DNNEstimatorV2(estimator.EstimatorV2):
+  """An estimator for TensorFlow DNN models with user-specified head.
+
+  Example:
+
+  ```python
+  sparse_feature_a = sparse_column_with_hash_bucket(...)
+  sparse_feature_b = sparse_column_with_hash_bucket(...)
+
+  sparse_feature_a_emb = embedding_column(sparse_id_column=sparse_feature_a,
+                                          ...)
+  sparse_feature_b_emb = embedding_column(sparse_id_column=sparse_feature_b,
+                                          ...)
+
+  estimator = tf.estimator.DNNEstimator(
+      head=tf.estimator.MultiLabelHead(n_classes=3),
+      feature_columns=[sparse_feature_a_emb, sparse_feature_b_emb],
+      hidden_units=[1024, 512, 256])
+
+  # Or estimator using the ProximalAdagradOptimizer optimizer with
+  # regularization.
+  estimator = tf.estimator.DNNEstimator(
+      head=tf.estimator.MultiLabelHead(n_classes=3),
+      feature_columns=[sparse_feature_a_emb, sparse_feature_b_emb],
+      hidden_units=[1024, 512, 256],
+      optimizer=tf.compat.v1.train.ProximalAdagradOptimizer(
+        learning_rate=0.1,
+        l1_regularization_strength=0.001
+      ))
+
+  # Or estimator using an optimizer with a learning rate decay.
+  estimator = tf.estimator.DNNEstimator(
+      head=tf.estimator.MultiLabelHead(n_classes=3),
+      feature_columns=[sparse_feature_a_emb, sparse_feature_b_emb],
+      hidden_units=[1024, 512, 256],
+      optimizer=lambda: tf.keras.optimizers.Adam(
+          learning_rate=tf.compat.v1.train.exponential_decay(
+              learning_rate=0.1,
+              global_step=tf.compat.v1.train.get_global_step(),
+              decay_steps=10000,
+              decay_rate=0.96)))
+
+  # Or estimator with warm-starting from a previous checkpoint.
+  estimator = tf.estimator.DNNEstimator(
+      head=tf.estimator.MultiLabelHead(n_classes=3),
+      feature_columns=[sparse_feature_a_emb, sparse_feature_b_emb],
+      hidden_units=[1024, 512, 256],
+      warm_start_from="/path/to/checkpoint/dir")
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train)
+  metrics = estimator.evaluate(input_fn=input_fn_eval)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError`:
+
+  * if `weight_column` is not `None`, a feature with `key=weight_column` whose
+    value is a `Tensor`.
+  * for each `column` in `feature_columns`:
+    - if `column` is a `CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
+  Loss and predicted output are determined by the specified head.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               head,
+               hidden_units,
+               feature_columns,
+               model_dir=None,
+               optimizer='Adagrad',
+               activation_fn=tf.nn.relu,
+               dropout=None,
+               config=None,
+               warm_start_from=None,
+               batch_norm=False):
+    """Initializes a `DNNEstimator` instance.
+
+    Args:
+      head: A `Head` instance constructed with a method such as
+        `tf.estimator.MultiLabelHead`.
+      hidden_units: Iterable of the number of hidden units per layer. All
+        layers are fully connected. Ex. `[64, 32]` means the first layer has
+        64 nodes and the second one has 32.
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `_FeatureColumn`.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      optimizer: An instance of `tf.keras.optimizers.*` used to train the
+        model. Can also be a string (one of 'Adagrad', 'Adam', 'Ftrl',
+        'RMSProp', 'SGD'), or a callable. Defaults to the Adagrad optimizer.
+      activation_fn: Activation function applied to each layer. If `None`, will
+        use `tf.nn.relu`.
+      dropout: When not `None`, the probability we will drop out a given
+        coordinate.
+      config: `RunConfig` object to configure the runtime settings.
+      warm_start_from: A string filepath to a checkpoint to warm-start from, or
+        a `WarmStartSettings` object to fully configure warm-starting.  If the
+        string filepath is provided instead of a `WarmStartSettings`, then all
+        weights are warm-started, and it is assumed that vocabularies and Tensor
+        names are unchanged.
+      batch_norm: Whether to use batch normalization after each hidden layer.
+    """
+
+    def _model_fn(features, labels, mode, config):
+      """Call the defined shared dnn_model_fn_v2."""
+      return dnn_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          hidden_units=hidden_units,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          activation_fn=activation_fn,
+          dropout=dropout,
+          config=config,
+          batch_norm=batch_norm)
+
+    estimator._canned_estimator_api_gauge.get_cell('Estimator').set('DNN')  # pylint: disable=protected-access
+    super(DNNEstimatorV2, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
+
+
+@estimator_export(v1=['estimator.DNNEstimator'])  # pylint: disable=missing-docstring
+class DNNEstimator(estimator.Estimator):
+  __doc__ = DNNEstimatorV2.__doc__
+
+  def __init__(self,
+               head,
+               hidden_units,
+               feature_columns,
+               model_dir=None,
+               optimizer='Adagrad',
+               activation_fn=tf.nn.relu,
+               dropout=None,
+               input_layer_partitioner=None,
+               config=None,
+               warm_start_from=None,
+               batch_norm=False):
+
+    def _model_fn(features, labels, mode, config):
+      """Call the defined shared _dnn_model_fn."""
+      return _dnn_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          hidden_units=hidden_units,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          activation_fn=activation_fn,
+          dropout=dropout,
+          input_layer_partitioner=input_layer_partitioner,
+          config=config,
+          batch_norm=batch_norm)
+
+    estimator._canned_estimator_api_gauge.get_cell('Estimator').set('DNN')  # pylint: disable=protected-access
+    super(DNNEstimator, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
+
+
+@estimator_export('estimator.DNNRegressor', v1=[])
+class DNNRegressorV2(estimator.EstimatorV2):
+  """A regressor for TensorFlow DNN models.
+
+  Example:
+
+  ```python
+  categorical_feature_a = categorical_column_with_hash_bucket(...)
+  categorical_feature_b = categorical_column_with_hash_bucket(...)
+
+  categorical_feature_a_emb = embedding_column(
+      categorical_column=categorical_feature_a, ...)
+  categorical_feature_b_emb = embedding_column(
+      categorical_column=categorical_feature_b, ...)
+
+  estimator = tf.estimator.DNNRegressor(
+      feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb],
+      hidden_units=[1024, 512, 256])
+
+  # Or estimator using the ProximalAdagradOptimizer optimizer with
+  # regularization.
+  estimator = tf.estimator.DNNRegressor(
+      feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb],
+      hidden_units=[1024, 512, 256],
+      optimizer=tf.compat.v1.train.ProximalAdagradOptimizer(
+        learning_rate=0.1,
+        l1_regularization_strength=0.001
+      ))
+
+  # Or estimator using an optimizer with a learning rate decay.
+  estimator = tf.estimator.DNNRegressor(
+      feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb],
+      hidden_units=[1024, 512, 256],
+      optimizer=lambda: tf.keras.optimizers.Adam(
+          learning_rate=tf.compat.v1.train.exponential_decay(
+              learning_rate=0.1,
+              global_step=tf.compat.v1.train.get_global_step(),
+              decay_steps=10000,
+              decay_rate=0.96)))
+
+  # Or estimator with warm-starting from a previous checkpoint.
+  estimator = tf.estimator.DNNRegressor(
+      feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb],
+      hidden_units=[1024, 512, 256],
+      warm_start_from="/path/to/checkpoint/dir")
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents the
+    # regression target.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents the
+    # regression target.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train)
+  metrics = estimator.evaluate(input_fn=input_fn_eval)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError` (a minimal sketch follows the list):
+
+  * if `weight_column` is not `None`, a feature with `key=weight_column` whose
+    value is a `Tensor`.
+  * for each `column` in `feature_columns`:
+    - if `column` is a `CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
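+  For instance (a hypothetical sketch; the `age` column and the literal
+  values are illustrative only), a matching `input_fn` for a single numeric
+  column with `label_dimension=1`:
+
+  ```python
+  feature_columns = [tf.feature_column.numeric_column('age')]
+  def input_fn_train():
+    # The dict key 'age' matches the column name; y is the regression
+    # target of shape [batch_size, label_dimension].
+    return tf.data.Dataset.from_tensor_slices(
+        ({'age': [[23.0], [31.0]]}, [[150.0], [175.0]])).batch(2)
+  ```
+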
+  Loss is calculated by using mean squared error.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(
+      self,
+      hidden_units,
+      feature_columns,
+      model_dir=None,
+      label_dimension=1,
+      weight_column=None,
+      optimizer='Adagrad',
+      activation_fn=tf.nn.relu,
+      dropout=None,
+      config=None,
+      warm_start_from=None,
+      loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+      batch_norm=False,
+  ):
+    """Initializes a `DNNRegressor` instance.
+
+    Args:
+      hidden_units: Iterable of the number of hidden units per layer. All
+        layers are fully connected. Ex. `[64, 32]` means the first layer has
+        64 nodes and the second one has 32.
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `FeatureColumn`.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      label_dimension: Number of regression targets per example. This is the
+        size of the last dimension of the labels and logits `Tensor` objects
+        (typically, these have shape `[batch_size, label_dimension]`).
+      weight_column: A string or a `NumericColumn` created by
+        `tf.feature_column.numeric_column` defining the feature column that
+        represents weights. It is used to down-weight or boost examples
+        during training, and is multiplied by the loss of the example. If it
+        is a string, it is used as a key to fetch the weight tensor from the
+        `features`. If it is a `NumericColumn`, the raw tensor is fetched by
+        key `weight_column.key`, then `weight_column.normalizer_fn` is
+        applied to it to get the weight tensor.
+      optimizer: An instance of `tf.keras.optimizers.*` used to train the
+        model. Can also be a string (one of 'Adagrad', 'Adam', 'Ftrl',
+        'RMSProp', 'SGD'), or a callable. Defaults to the Adagrad optimizer.
+      activation_fn: Activation function applied to each layer. If `None`, will
+        use `tf.nn.relu`.
+      dropout: When not `None`, the probability we will drop out a given
+        coordinate.
+      config: `RunConfig` object to configure the runtime settings.
+      warm_start_from: A string filepath to a checkpoint to warm-start from, or
+        a `WarmStartSettings` object to fully configure warm-starting.  If the
+        string filepath is provided instead of a `WarmStartSettings`, then all
+        weights are warm-started, and it is assumed that vocabularies and Tensor
+        names are unchanged.
+      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+        to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+      batch_norm: Whether to use batch normalization after each hidden layer.
+    """
+    head = regression_head.RegressionHead(
+        label_dimension=label_dimension,
+        weight_column=weight_column,
+        loss_reduction=loss_reduction)
+    estimator._canned_estimator_api_gauge.get_cell('Regressor').set('DNN')  # pylint: disable=protected-access
+
+    def _model_fn(features, labels, mode, config):
+      """Call the defined shared dnn_model_fn_v2."""
+      return dnn_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          hidden_units=hidden_units,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          activation_fn=activation_fn,
+          dropout=dropout,
+          config=config,
+          batch_norm=batch_norm)
+
+    super(DNNRegressorV2, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
+
+
+@estimator_export(v1=['estimator.DNNRegressor'])  # pylint: disable=missing-docstring
+class DNNRegressor(estimator.Estimator):
+  __doc__ = DNNRegressorV2.__doc__.replace('SUM_OVER_BATCH_SIZE', 'SUM')
+
+  def __init__(
+      self,
+      hidden_units,
+      feature_columns,
+      model_dir=None,
+      label_dimension=1,
+      weight_column=None,
+      optimizer='Adagrad',
+      activation_fn=tf.nn.relu,
+      dropout=None,
+      input_layer_partitioner=None,
+      config=None,
+      warm_start_from=None,
+      loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+      batch_norm=False,
+  ):
+    head = head_lib._regression_head(  # pylint: disable=protected-access
+        label_dimension=label_dimension,
+        weight_column=weight_column,
+        loss_reduction=loss_reduction)
+    estimator._canned_estimator_api_gauge.get_cell('Regressor').set('DNN')  # pylint: disable=protected-access
+
+    def _model_fn(features, labels, mode, config):
+      """Call the defined shared _dnn_model_fn."""
+      return _dnn_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          hidden_units=hidden_units,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          activation_fn=activation_fn,
+          dropout=dropout,
+          input_layer_partitioner=input_layer_partitioner,
+          config=config,
+          batch_norm=batch_norm)
+
+    super(DNNRegressor, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn_linear_combined.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn_linear_combined.py
new file mode 100644
index 00000000..0f56662a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn_linear_combined.py
@@ -0,0 +1,1140 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""TensorFlow estimators for Linear and DNN joined training models."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import math
+
+import six
+import tensorflow as tf
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator.canned import dnn
+from tensorflow_estimator.python.estimator.canned import head as head_lib
+from tensorflow_estimator.python.estimator.canned import linear
+from tensorflow_estimator.python.estimator.canned import optimizers
+from tensorflow_estimator.python.estimator.head import head_utils
+from tensorflow_estimator.python.estimator.head import regression_head
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+# The default learning rates are a historical artifact of the initial
+# implementation.
+_DNN_LEARNING_RATE = 0.001
+_LINEAR_LEARNING_RATE = 0.005
+
+
+def _check_no_sync_replicas_optimizer(optimizer):
+  if isinstance(optimizer, tf.compat.v1.train.SyncReplicasOptimizer):
+    raise ValueError(
+        'SyncReplicasOptimizer does not support multi optimizers case. '
+        'Therefore, it is not supported in DNNLinearCombined model. '
+        'If you want to use this optimizer, please use either DNN or Linear '
+        'model.')
+
+
+def _linear_learning_rate(num_linear_feature_columns):
+  """Returns the default learning rate of the linear model.
+
+  The calculation is a historical artifact of this initial implementation, but
+  has proven a reasonable choice.
+
+  Args:
+    num_linear_feature_columns: The number of feature columns of the linear
+      model.
+
+  Returns:
+    A float.
+  """
+  default_learning_rate = 1. / math.sqrt(num_linear_feature_columns)
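+  # Worked example: with 4 columns, 1/sqrt(4) = 0.5 is capped to
+  # _LINEAR_LEARNING_RATE = 0.005; with 1,000,000 columns,
+  # 1/sqrt(1e6) = 0.001 is below the cap and used as-is.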
+  return min(_LINEAR_LEARNING_RATE, default_learning_rate)
+
+
+def _add_layer_summary(value, tag):
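+  # Tracks activation sparsity (fraction of zeros, e.g. dead ReLU units) and
+  # the activation distribution for TensorBoard.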
+  tf.compat.v1.summary.scalar('%s/fraction_of_zero_values' % tag,
+                              tf.math.zero_fraction(value))
+  tf.compat.v1.summary.histogram('%s/activation' % tag, value)
+
+
+def _validate_feature_columns(linear_feature_columns, dnn_feature_columns):
+  """Validates feature columns DNNLinearCombinedRegressor."""
+  linear_feature_columns = linear_feature_columns or []
+  dnn_feature_columns = dnn_feature_columns or []
+  feature_columns = (list(linear_feature_columns) + list(dnn_feature_columns))
+  if not feature_columns:
+    raise ValueError('Either linear_feature_columns or dnn_feature_columns '
+                     'must be defined.')
+  return feature_columns
+
+
+def _dnn_linear_combined_model_fn_v2(
+    features,
+    labels,
+    mode,
+    head,
+    linear_feature_columns=None,
+    linear_optimizer='Ftrl',
+    dnn_feature_columns=None,
+    dnn_optimizer='Adagrad',
+    dnn_hidden_units=None,
+    dnn_activation_fn=tf.nn.relu,
+    dnn_dropout=None,
+    config=None,
+    batch_norm=False,
+    linear_sparse_combiner='sum',
+    loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE):
+  """Deep Neural Net and Linear combined model_fn.
+
+  Args:
+    features: dict of `Tensor`.
+    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of dtype
+      `int32` or `int64` in the range `[0, n_classes)`.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `Head` instance.
+    linear_feature_columns: An iterable containing all the feature columns used
+      by the Linear model.
+    linear_optimizer: string, `Optimizer` object, or callable that defines the
+      optimizer to use for training the Linear model. Defaults to the Ftrl
+      optimizer.
+    dnn_feature_columns: An iterable containing all the feature columns used by
+      the DNN model.
+    dnn_optimizer: string, `Optimizer` object, or callable that defines the
+      optimizer to use for training the DNN model. Defaults to the Adagrad
+      optimizer.
+    dnn_hidden_units: List of hidden units per DNN layer.
+    dnn_activation_fn: Activation function applied to each DNN layer. If `None`,
+      will use `tf.nn.relu`.
+    dnn_dropout: When not `None`, the probability we will drop out a given DNN
+      coordinate.
+    config: `RunConfig` object to configure the runtime settings.
+    batch_norm: Whether to use batch normalization after each hidden layer.
+    linear_sparse_combiner: A string specifying how to reduce the linear model
+      if a categorical column is multivalent.  One of "mean", "sqrtn", and
+      "sum".
+    loss_reduction: One of `tf.keras.losses.Reduction` except `NONE`. Describes
+      how to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+
+  Returns:
+    An `EstimatorSpec` instance.
+
+  Raises:
+    ValueError: If both `linear_feature_columns` and `dnn_feature_columns`
+      are empty at the same time, or `features` has the wrong type.
+  """
+  if not isinstance(features, dict):
+    raise ValueError('features should be a dictionary of `Tensor`s. '
+                     'Given type: {}'.format(type(features)))
+  if not linear_feature_columns and not dnn_feature_columns:
+    raise ValueError(
+        'Either linear_feature_columns or dnn_feature_columns must be defined.')
+
+  del config
+
+  # Build DNN Logits.
+  if not dnn_feature_columns:
+    dnn_logits = None
+  else:
+    if mode == ModeKeys.TRAIN:
+      dnn_optimizer = optimizers.get_optimizer_instance_v2(
+          dnn_optimizer, learning_rate=_DNN_LEARNING_RATE)
+      _check_no_sync_replicas_optimizer(dnn_optimizer)
+
+    if not dnn_hidden_units:
+      raise ValueError(
+          'dnn_hidden_units must be defined when dnn_feature_columns is '
+          'specified.')
+    dnn_logits, dnn_trainable_variables, dnn_update_ops = (
+        dnn._dnn_model_fn_builder_v2(  # pylint: disable=protected-access
+            units=head.logits_dimension,
+            hidden_units=dnn_hidden_units,
+            feature_columns=dnn_feature_columns,
+            activation_fn=dnn_activation_fn,
+            dropout=dnn_dropout,
+            batch_norm=batch_norm,
+            features=features,
+            mode=mode))
+
+  if not linear_feature_columns:
+    linear_logits = None
+  else:
+    if mode == ModeKeys.TRAIN:
+      linear_optimizer = optimizers.get_optimizer_instance_v2(
+          linear_optimizer,
+          learning_rate=_linear_learning_rate(len(linear_feature_columns)))
+      _check_no_sync_replicas_optimizer(linear_optimizer)
+
+    linear_logits, linear_trainable_variables = (
+        linear._linear_model_fn_builder_v2(  # pylint: disable=protected-access
+            units=head.logits_dimension,
+            feature_columns=linear_feature_columns,
+            sparse_combiner=linear_sparse_combiner,
+            features=features))
+    _add_layer_summary(linear_logits, 'linear')
+
+  # Combine logits and build full model.
+  if dnn_logits is not None and linear_logits is not None:
+    logits = dnn_logits + linear_logits
+  elif dnn_logits is not None:
+    logits = dnn_logits
+  else:
+    logits = linear_logits
+
+  def _train_op_fn(loss):
+    """Returns the op to optimize the loss."""
+    train_ops = []
+    # Scale loss by number of replicas.
+    if loss_reduction == losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE:
+      loss = losses_utils.scale_loss_for_distribution(loss)
+
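+    # Each Keras optimizer's get_updates() returns the ops that apply its
+    # gradients; collecting the ops from both parts lets a single grouped
+    # train op drive the whole wide-and-deep model.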
+    if dnn_logits is not None:
+      train_ops.extend(dnn_optimizer.get_updates(loss, dnn_trainable_variables))
+      if dnn_update_ops is not None:
+        train_ops.extend(dnn_update_ops)
+    if linear_logits is not None:
+      train_ops.extend(
+          linear_optimizer.get_updates(loss, linear_trainable_variables))
+    train_op = tf.group(*train_ops)
+    return train_op
+
+  # In TRAIN mode, assign the global_step variable to optimizer.iterations
+  # so that global_step increases correctly, as hooks rely on the global
+  # step as a step counter. Note that only one model's optimizer needs this
+  # assignment.
+  if mode == ModeKeys.TRAIN:
+    if dnn_logits is not None:
+      dnn_optimizer.iterations = tf.compat.v1.train.get_or_create_global_step()
+    else:
+      linear_optimizer.iterations = \
+        tf.compat.v1.train.get_or_create_global_step()
+
+  return head.create_estimator_spec(
+      features=features,
+      mode=mode,
+      labels=labels,
+      train_op_fn=_train_op_fn,
+      logits=logits)
+
+
+def _dnn_linear_combined_model_fn(features,
+                                  labels,
+                                  mode,
+                                  head,
+                                  linear_feature_columns=None,
+                                  linear_optimizer='Ftrl',
+                                  dnn_feature_columns=None,
+                                  dnn_optimizer='Adagrad',
+                                  dnn_hidden_units=None,
+                                  dnn_activation_fn=tf.nn.relu,
+                                  dnn_dropout=None,
+                                  input_layer_partitioner=None,
+                                  config=None,
+                                  batch_norm=False,
+                                  linear_sparse_combiner='sum'):
+  """Deep Neural Net and Linear combined model_fn.
+
+  Args:
+    features: dict of `Tensor`.
+    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of dtype
+      `int32` or `int64` in the range `[0, n_classes)`.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `Head` instance.
+    linear_feature_columns: An iterable containing all the feature columns used
+      by the Linear model.
+    linear_optimizer: string, `Optimizer` object, or callable that defines the
+      optimizer to use for training the Linear model. Defaults to the Ftrl
+      optimizer.
+    dnn_feature_columns: An iterable containing all the feature columns used by
+      the DNN model.
+    dnn_optimizer: string, `Optimizer` object, or callable that defines the
+      optimizer to use for training the DNN model. Defaults to the Adagrad
+      optimizer.
+    dnn_hidden_units: List of hidden units per DNN layer.
+    dnn_activation_fn: Activation function applied to each DNN layer. If `None`,
+      will use `tf.nn.relu`.
+    dnn_dropout: When not `None`, the probability we will drop out a given DNN
+      coordinate.
+    input_layer_partitioner: Partitioner for input layer.
+    config: `RunConfig` object to configure the runtime settings.
+    batch_norm: Whether to use batch normalization after each hidden layer.
+    linear_sparse_combiner: A string specifying how to reduce the linear model
+      if a categorical column is multivalent.  One of "mean", "sqrtn", and
+      "sum".
+
+  Returns:
+    An `EstimatorSpec` instance.
+
+  Raises:
+    ValueError: If both `linear_feature_columns` and `dnn_feature_columns`
+      are empty at the same time, or `input_layer_partitioner` is missing,
+      or `features` has the wrong type.
+  """
+  if not isinstance(features, dict):
+    raise ValueError('features should be a dictionary of `Tensor`s. '
+                     'Given type: {}'.format(type(features)))
+  if not linear_feature_columns and not dnn_feature_columns:
+    raise ValueError(
+        'Either linear_feature_columns or dnn_feature_columns must be defined.')
+
+  num_ps_replicas = config.num_ps_replicas if config else 0
+  input_layer_partitioner = input_layer_partitioner or (
+      tf.compat.v1.min_max_variable_partitioner(
+          max_partitions=num_ps_replicas, min_slice_size=64 << 20))
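+  # The default partitioner shards input-layer variables across at most
+  # `num_ps_replicas` parameter servers, with a minimum slice size of 64 MB
+  # (64 << 20 bytes).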
+
+  # Build DNN Logits.
+  dnn_parent_scope = 'dnn'
+
+  if not dnn_feature_columns:
+    dnn_logits = None
+  else:
+    dnn_optimizer = optimizers.get_optimizer_instance(
+        dnn_optimizer, learning_rate=_DNN_LEARNING_RATE)
+    _check_no_sync_replicas_optimizer(dnn_optimizer)
+    if not dnn_hidden_units:
+      raise ValueError(
+          'dnn_hidden_units must be defined when dnn_feature_columns is '
+          'specified.')
+    dnn_partitioner = (
+        tf.compat.v1.min_max_variable_partitioner(
+            max_partitions=num_ps_replicas))
+    with tf.compat.v1.variable_scope(
+        dnn_parent_scope,
+        values=tuple(six.itervalues(features)),
+        partitioner=dnn_partitioner) as scope:
+      dnn_absolute_scope = scope.name
+      dnn_logit_fn = dnn.dnn_logit_fn_builder(
+          units=head.logits_dimension,
+          hidden_units=dnn_hidden_units,
+          feature_columns=dnn_feature_columns,
+          activation_fn=dnn_activation_fn,
+          dropout=dnn_dropout,
+          batch_norm=batch_norm,
+          input_layer_partitioner=input_layer_partitioner)
+      dnn_logits = dnn_logit_fn(features=features, mode=mode)
+
+  linear_parent_scope = 'linear'
+
+  if not linear_feature_columns:
+    linear_logits = None
+  else:
+    linear_optimizer = optimizers.get_optimizer_instance(
+        linear_optimizer,
+        learning_rate=_linear_learning_rate(len(linear_feature_columns)))
+    _check_no_sync_replicas_optimizer(linear_optimizer)
+    with tf.compat.v1.variable_scope(
+        linear_parent_scope,
+        values=tuple(six.itervalues(features)),
+        partitioner=input_layer_partitioner) as scope:
+      linear_absolute_scope = scope.name
+      logit_fn = linear.linear_logit_fn_builder(
+          units=head.logits_dimension,
+          feature_columns=linear_feature_columns,
+          sparse_combiner=linear_sparse_combiner)
+      linear_logits = logit_fn(features=features)
+      _add_layer_summary(linear_logits, scope.name)
+
+  # Combine logits and build full model.
+  if dnn_logits is not None and linear_logits is not None:
+    logits = dnn_logits + linear_logits
+  elif dnn_logits is not None:
+    logits = dnn_logits
+  else:
+    logits = linear_logits
+
+  def _train_op_fn(loss):
+    """Returns the op to optimize the loss."""
+    train_ops = []
+    global_step = tf.compat.v1.train.get_global_step()
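+    # Each optimizer minimizes only the variables created under its own
+    # scope, so the DNN and linear parts can be trained with different
+    # optimizers.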
+    if dnn_logits is not None:
+      train_ops.append(
+          dnn_optimizer.minimize(
+              loss,
+              var_list=tf.compat.v1.get_collection(
+                  tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES,
+                  scope=dnn_absolute_scope)))
+    if linear_logits is not None:
+      train_ops.append(
+          linear_optimizer.minimize(
+              loss,
+              var_list=tf.compat.v1.get_collection(
+                  tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES,
+                  scope=linear_absolute_scope)))
+
+    train_op = tf.group(*train_ops)
+    with tf.control_dependencies([train_op]):
+      return tf.compat.v1.assign_add(global_step, 1).op
+
+  return head.create_estimator_spec(
+      features=features,
+      mode=mode,
+      labels=labels,
+      train_op_fn=_train_op_fn,
+      logits=logits)
+
+
+@estimator_export('estimator.DNNLinearCombinedClassifier', v1=[])
+class DNNLinearCombinedClassifierV2(estimator.EstimatorV2):
+  """An estimator for TensorFlow Linear and DNN joined classification models.
+
+  Note: This estimator is also known as wide-n-deep.
+
+  Example:
+
+  ```python
+  numeric_feature = numeric_column(...)
+  categorical_feature_a = categorical_column_with_hash_bucket(...)
+  categorical_feature_b = categorical_column_with_hash_bucket(...)
+
+  categorical_feature_a_x_categorical_feature_b = crossed_column(...)
+  categorical_feature_a_emb = embedding_column(
+      categorical_column=categorical_feature_a, ...)
+  categorical_feature_b_emb = embedding_column(
+      categorical_column=categorical_feature_b, ...)
+
+  estimator = tf.estimator.DNNLinearCombinedClassifier(
+      # wide settings
+      linear_feature_columns=[categorical_feature_a_x_categorical_feature_b],
+      linear_optimizer=tf.keras.optimizers.Ftrl(...),
+      # deep settings
+      dnn_feature_columns=[
+          categorical_feature_a_emb, categorical_feature_b_emb,
+          numeric_feature],
+      dnn_hidden_units=[1000, 500, 100],
+      dnn_optimizer=tf.keras.optimizers.Adagrad(...),
+      # warm-start settings
+      warm_start_from="/path/to/checkpoint/dir")
+
+  # To apply L1 and L2 regularization, you can set dnn_optimizer to:
+  tf.compat.v1.train.ProximalAdagradOptimizer(
+      learning_rate=0.1,
+      l1_regularization_strength=0.001,
+      l2_regularization_strength=0.001)
+  # To apply learning rate decay, you can set dnn_optimizer to a callable:
+  lambda: tf.keras.optimizers.Adam(
+      learning_rate=tf.compat.v1.train.exponential_decay(
+          learning_rate=0.1,
+          global_step=tf.compat.v1.train.get_global_step(),
+          decay_steps=10000,
+          decay_rate=0.96))
+  # It is the same for linear_optimizer.
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train, steps=100)
+  metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError` (a minimal sketch follows the list):
+
+  * for each `column` in `dnn_feature_columns` + `linear_feature_columns`:
+    - if `column` is a `CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
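+  For example (a hypothetical sketch; the feature names are illustrative),
+  both column lists read from a single shared features dict:
+
+  ```python
+  # 'age' could feed a deep numeric column and 'zip' a wide categorical
+  # column; one features dict serves both parts of the model.
+  def input_fn_train():
+    features = {'age': [[23.0], [31.0]], 'zip': [['94043'], ['10001']]}
+    return tf.data.Dataset.from_tensor_slices((features, [0, 1])).batch(2)
+  ```
+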
+  Loss is calculated by using softmax cross entropy.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               model_dir=None,
+               linear_feature_columns=None,
+               linear_optimizer='Ftrl',
+               dnn_feature_columns=None,
+               dnn_optimizer='Adagrad',
+               dnn_hidden_units=None,
+               dnn_activation_fn=tf.nn.relu,
+               dnn_dropout=None,
+               n_classes=2,
+               weight_column=None,
+               label_vocabulary=None,
+               config=None,
+               warm_start_from=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               batch_norm=False,
+               linear_sparse_combiner='sum'):
+    """Initializes a DNNLinearCombinedClassifier instance.
+
+    Args:
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      linear_feature_columns: An iterable containing all the feature columns
+        used by the linear part of the model. All items in the set must be
+        instances of classes derived from `FeatureColumn`.
+      linear_optimizer: An instance of `tf.keras.optimizers.*` used to apply
+        gradients to the linear part of the model. Can also be a string (one
+        of 'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or a callable.
+        Defaults to the FTRL optimizer.
+      dnn_feature_columns: An iterable containing all the feature columns used
+        by the deep part of the model. All items in the set must be instances
+        of classes derived from `FeatureColumn`.
+      dnn_optimizer: An instance of `tf.keras.optimizers.*` used to apply
+        gradients to the deep part of the model. Can also be a string (one of
+        'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or a callable. Defaults
+        to the Adagrad optimizer.
+      dnn_hidden_units: List of hidden units per layer. All layers are fully
+        connected.
+      dnn_activation_fn: Activation function applied to each layer. If None,
+        will use `tf.nn.relu`.
+      dnn_dropout: When not None, the probability we will drop out a given
+        coordinate.
+      n_classes: Number of label classes. Defaults to 2, namely binary
+        classification. Must be > 1.
+      weight_column: A string or a `_NumericColumn` created by
+        `tf.feature_column.numeric_column` defining the feature column that
+        represents weights. It is used to down-weight or boost examples
+        during training, and is multiplied by the loss of the example. If it
+        is a string, it is used as a key to fetch the weight tensor from the
+        `features`. If it is a `_NumericColumn`, the raw tensor is fetched by
+        key `weight_column.key`, then `weight_column.normalizer_fn` is
+        applied to it to get the weight tensor.
+      label_vocabulary: A list of strings representing possible label values.
+        If given, labels must be of string type and take values in
+        `label_vocabulary`. If it is not given, labels are assumed to be
+        already encoded as integer or float within [0, 1] for `n_classes=2`,
+        and as integer values in {0, 1,..., n_classes-1} for `n_classes` > 2.
+        An error is raised if the vocabulary is not provided and the labels
+        are strings.
+      config: RunConfig object to configure the runtime settings.
+      warm_start_from: A string filepath to a checkpoint to warm-start from, or
+        a `WarmStartSettings` object to fully configure warm-starting.  If the
+        string filepath is provided instead of a `WarmStartSettings`, then all
+        weights are warm-started, and it is assumed that vocabularies and Tensor
+        names are unchanged.
+      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+        to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+      batch_norm: Whether to use batch normalization after each hidden layer.
+      linear_sparse_combiner: A string specifying how to reduce the linear model
+        if a categorical column is multivalent.  One of "mean", "sqrtn", and
+        "sum" -- these are effectively different ways to do example-level
+        normalization, which can be useful for bag-of-words features.  For more
+        details, see `tf.feature_column.linear_model`.
+
+    Raises:
+      ValueError: If both linear_feature_columns and dnn_feature_columns are
+        empty at the same time.
+    """
+    self._feature_columns = _validate_feature_columns(
+        linear_feature_columns=linear_feature_columns,
+        dnn_feature_columns=dnn_feature_columns)
+
+    head = head_utils.binary_or_multi_class_head(
+        n_classes,
+        weight_column=weight_column,
+        label_vocabulary=label_vocabulary,
+        loss_reduction=loss_reduction)
+    estimator._canned_estimator_api_gauge.get_cell('Classifier').set(  # pylint: disable=protected-access
+        'DNNLinearCombined')
+
+    def _model_fn(features, labels, mode, config):
+      """Call the _dnn_linear_combined_model_fn."""
+      return _dnn_linear_combined_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          linear_feature_columns=linear_feature_columns,
+          linear_optimizer=linear_optimizer,
+          dnn_feature_columns=dnn_feature_columns,
+          dnn_optimizer=dnn_optimizer,
+          dnn_hidden_units=dnn_hidden_units,
+          dnn_activation_fn=dnn_activation_fn,
+          dnn_dropout=dnn_dropout,
+          config=config,
+          batch_norm=batch_norm,
+          linear_sparse_combiner=linear_sparse_combiner,
+          loss_reduction=loss_reduction)
+
+    super(DNNLinearCombinedClassifierV2, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
+
+
+@estimator_export(v1=['estimator.DNNLinearCombinedClassifier'])  # pylint: disable=missing-docstring
+class DNNLinearCombinedClassifier(estimator.Estimator):
+  __doc__ = DNNLinearCombinedClassifierV2.__doc__.replace(
+      'SUM_OVER_BATCH_SIZE', 'SUM')
+
+  def __init__(self,
+               model_dir=None,
+               linear_feature_columns=None,
+               linear_optimizer='Ftrl',
+               dnn_feature_columns=None,
+               dnn_optimizer='Adagrad',
+               dnn_hidden_units=None,
+               dnn_activation_fn=tf.nn.relu,
+               dnn_dropout=None,
+               n_classes=2,
+               weight_column=None,
+               label_vocabulary=None,
+               input_layer_partitioner=None,
+               config=None,
+               warm_start_from=None,
+               loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+               batch_norm=False,
+               linear_sparse_combiner='sum'):
+    self._feature_columns = _validate_feature_columns(
+        linear_feature_columns=linear_feature_columns,
+        dnn_feature_columns=dnn_feature_columns)
+
+    head = head_lib._binary_logistic_or_multi_class_head(  # pylint: disable=protected-access
+        n_classes, weight_column, label_vocabulary, loss_reduction)
+    estimator._canned_estimator_api_gauge.get_cell('Classifier').set(  # pylint: disable=protected-access
+        'DNNLinearCombined')
+
+    def _model_fn(features, labels, mode, config):
+      """Call the _dnn_linear_combined_model_fn."""
+      return _dnn_linear_combined_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          linear_feature_columns=linear_feature_columns,
+          linear_optimizer=linear_optimizer,
+          dnn_feature_columns=dnn_feature_columns,
+          dnn_optimizer=dnn_optimizer,
+          dnn_hidden_units=dnn_hidden_units,
+          dnn_activation_fn=dnn_activation_fn,
+          dnn_dropout=dnn_dropout,
+          input_layer_partitioner=input_layer_partitioner,
+          config=config,
+          batch_norm=batch_norm,
+          linear_sparse_combiner=linear_sparse_combiner)
+
+    super(DNNLinearCombinedClassifier, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
+
+
+def _init_dnn_linear_combined_estimator(head, linear_feature_columns,
+                                        linear_optimizer, dnn_feature_columns,
+                                        dnn_optimizer, dnn_hidden_units,
+                                        dnn_activation_fn, dnn_dropout,
+                                        input_layer_partitioner,
+                                        linear_sparse_combiner):
+  """Helper function for the initialization of DNNLinearCombinedEstimator."""
+  linear_feature_columns = linear_feature_columns or []
+  dnn_feature_columns = dnn_feature_columns or []
+  feature_columns = (list(linear_feature_columns) + list(dnn_feature_columns))
+  if not feature_columns:
+    raise ValueError('Either linear_feature_columns or dnn_feature_columns '
+                     'must be defined.')
+
+  def _model_fn(features, labels, mode, config):
+    """Call the _dnn_linear_combined_model_fn."""
+    return _dnn_linear_combined_model_fn(
+        features=features,
+        labels=labels,
+        mode=mode,
+        head=head,
+        linear_feature_columns=linear_feature_columns,
+        linear_optimizer=linear_optimizer,
+        dnn_feature_columns=dnn_feature_columns,
+        dnn_optimizer=dnn_optimizer,
+        dnn_hidden_units=dnn_hidden_units,
+        dnn_activation_fn=dnn_activation_fn,
+        dnn_dropout=dnn_dropout,
+        input_layer_partitioner=input_layer_partitioner,
+        config=config,
+        linear_sparse_combiner=linear_sparse_combiner)
+
+  return feature_columns, _model_fn
+
+
+@estimator_export('estimator.DNNLinearCombinedEstimator', v1=[])
+class DNNLinearCombinedEstimatorV2(estimator.EstimatorV2):
+  """An estimator for TensorFlow Linear and DNN joined models with custom head.
+
+  Note: This estimator is also known as wide-n-deep.
+
+  Example:
+
+  ```python
+  numeric_feature = numeric_column(...)
+  categorical_feature_a = categorical_column_with_hash_bucket(...)
+  categorical_feature_b = categorical_column_with_hash_bucket(...)
+
+  categorical_feature_a_x_categorical_feature_b = crossed_column(...)
+  categorical_feature_a_emb = embedding_column(
+      categorical_column=categorical_feature_a, ...)
+  categorical_feature_b_emb = embedding_column(
+      categorical_column=categorical_feature_b, ...)
+
+  estimator = tf.estimator.DNNLinearCombinedEstimator(
+      head=tf.estimator.MultiLabelHead(n_classes=3),
+      # wide settings
+      linear_feature_columns=[categorical_feature_a_x_categorical_feature_b],
+      linear_optimizer=tf.keras.optimizers.Ftrl(...),
+      # deep settings
+      dnn_feature_columns=[
+          categorical_feature_a_emb, categorical_feature_b_emb,
+          numeric_feature],
+      dnn_hidden_units=[1000, 500, 100],
+      dnn_optimizer=tf.keras.optimizers.Adagrad(...))
+
+  # To apply L1 and L2 regularization, you can set dnn_optimizer to:
+  tf.compat.v1.train.ProximalAdagradOptimizer(
+      learning_rate=0.1,
+      l1_regularization_strength=0.001,
+      l2_regularization_strength=0.001)
+  # To apply learning rate decay, you can set dnn_optimizer to a callable:
+  lambda: tf.keras.optimizers.Adam(
+      learning_rate=tf.compat.v1.train.exponential_decay(
+          learning_rate=0.1,
+          global_step=tf.compat.v1.train.get_global_step(),
+          decay_steps=10000,
+          decay_rate=0.96))
+  # It is the same for linear_optimizer.
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train, steps=100)
+  metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
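+
+  A minimal runnable input builder sketch using `tf.data` (the feature name
+  'age' and the literal values are illustrative assumptions, not part of this
+  API):
+
+  ```python
+  def input_fn_train():
+    # 'age' and the values below are assumed for illustration only.
+    dataset = tf.data.Dataset.from_tensor_slices(
+        ({'age': [[18.], [32.]]}, [[0], [1]]))
+    return dataset.shuffle(2).batch(2)
+  ```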
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError`:
+
+  * for each `column` in `dnn_feature_columns` + `linear_feature_columns`:
+    - if `column` is a `CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
+  Loss and predicted output are determined by the specified `head`.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               head,
+               model_dir=None,
+               linear_feature_columns=None,
+               linear_optimizer='Ftrl',
+               dnn_feature_columns=None,
+               dnn_optimizer='Adagrad',
+               dnn_hidden_units=None,
+               dnn_activation_fn=tf.nn.relu,
+               dnn_dropout=None,
+               config=None,
+               linear_sparse_combiner='sum'):
+    """Initializes a DNNLinearCombinedEstimator instance.
+
+    Args:
+      head: A `Head` instance constructed with a method such as
+        `tf.estimator.MultiLabelHead`.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator to
+        continue training a previously saved model.
+      linear_feature_columns: An iterable containing all the feature columns
+        used by linear part of the model. All items in the set must be instances
+        of classes derived from `FeatureColumn`.
+      linear_optimizer: An instance of `tf.keras.optimizers.*` used to apply
+        gradients to the linear part of the model. Can also be a string (one of
+        'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or callable. Defaults to
+        FTRL optimizer.
+      dnn_feature_columns: An iterable containing all the feature columns used
+        by deep part of the model. All items in the set must be instances of
+        classes derived from `FeatureColumn`.
+      dnn_optimizer: An instance of `tf.keras.optimizers.*` used to apply
+        gradients to the deep part of the model. Can also be a string (one of
+        'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or callable. Defaults to
+        Adagrad optimizer.
+      dnn_hidden_units: List of hidden units per layer. All layers are fully
+        connected.
+      dnn_activation_fn: Activation function applied to each layer. If None,
+        will use `tf.nn.relu`.
+      dnn_dropout: When not None, the probability we will drop out a given
+        coordinate.
+      config: RunConfig object to configure the runtime settings.
+      linear_sparse_combiner: A string specifying how to reduce the linear model
+        if a categorical column is multivalent.  One of "mean", "sqrtn", and
+        "sum" -- these are effectively different ways to do example-level
+        normalization, which can be useful for bag-of-words features.  For more
+        details, see `tf.feature_column.linear_model`.
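+        As an illustrative example (the weights are assumed): if a multivalent
+        column has two active values with linear weights 0.4 and 0.6, "sum"
+        contributes 1.0 to the logits, "mean" contributes 0.5, and "sqrtn"
+        contributes 1.0/sqrt(2), roughly 0.71.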
+
+    Raises:
+      ValueError: If both linear_feature_columns and dnn_feature_columns are
+        empty at the same time.
+    """
+    self._feature_columns = _validate_feature_columns(
+        linear_feature_columns=linear_feature_columns,
+        dnn_feature_columns=dnn_feature_columns)
+    estimator._canned_estimator_api_gauge.get_cell('Estimator').set(
+        'DNNLinearCombined')  # pylint: disable=protected-access
+
+    def _model_fn(features, labels, mode, config):
+      """Call the _dnn_linear_combined_model_fn."""
+      return _dnn_linear_combined_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          linear_feature_columns=linear_feature_columns,
+          linear_optimizer=linear_optimizer,
+          dnn_feature_columns=dnn_feature_columns,
+          dnn_optimizer=dnn_optimizer,
+          dnn_hidden_units=dnn_hidden_units,
+          dnn_activation_fn=dnn_activation_fn,
+          dnn_dropout=dnn_dropout,
+          config=config,
+          linear_sparse_combiner=linear_sparse_combiner)
+
+    super(DNNLinearCombinedEstimatorV2, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config)
+
+
+@estimator_export(v1=['estimator.DNNLinearCombinedEstimator'])  # pylint: disable=missing-docstring
+class DNNLinearCombinedEstimator(estimator.Estimator):
+  __doc__ = DNNLinearCombinedEstimatorV2.__doc__
+
+  def __init__(self,
+               head,
+               model_dir=None,
+               linear_feature_columns=None,
+               linear_optimizer='Ftrl',
+               dnn_feature_columns=None,
+               dnn_optimizer='Adagrad',
+               dnn_hidden_units=None,
+               dnn_activation_fn=tf.nn.relu,
+               dnn_dropout=None,
+               input_layer_partitioner=None,
+               config=None,
+               linear_sparse_combiner='sum'):
+    self._feature_columns = _validate_feature_columns(
+        linear_feature_columns=linear_feature_columns,
+        dnn_feature_columns=dnn_feature_columns)
+    estimator._canned_estimator_api_gauge.get_cell('Estimator').set(
+        'DNNLinearCombined')  # pylint: disable=protected-access
+
+    def _model_fn(features, labels, mode, config):
+      """Call the _dnn_linear_combined_model_fn."""
+      return _dnn_linear_combined_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          linear_feature_columns=linear_feature_columns,
+          linear_optimizer=linear_optimizer,
+          dnn_feature_columns=dnn_feature_columns,
+          dnn_optimizer=dnn_optimizer,
+          dnn_hidden_units=dnn_hidden_units,
+          dnn_activation_fn=dnn_activation_fn,
+          dnn_dropout=dnn_dropout,
+          input_layer_partitioner=input_layer_partitioner,
+          config=config,
+          linear_sparse_combiner=linear_sparse_combiner)
+
+    super(DNNLinearCombinedEstimator, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config)
+
+
+@estimator_export('estimator.DNNLinearCombinedRegressor', v1=[])
+class DNNLinearCombinedRegressorV2(estimator.EstimatorV2):
+  """An estimator for TensorFlow Linear and DNN joined models for regression.
+
+  Note: This estimator is also known as wide-n-deep.
+
+  Example:
+
+  ```python
+  numeric_feature = numeric_column(...)
+  categorical_column_a = categorical_column_with_hash_bucket(...)
+  categorical_column_b = categorical_column_with_hash_bucket(...)
+
+  categorical_feature_a_x_categorical_feature_b = crossed_column(...)
+  categorical_feature_a_emb = embedding_column(
+      categorical_column=categorical_feature_a, ...)
+  categorical_feature_b_emb = embedding_column(
+      categorical_column=categorical_feature_b, ...)
+
+  estimator = tf.estimator.DNNLinearCombinedRegressor(
+      # wide settings
+      linear_feature_columns=[categorical_feature_a_x_categorical_feature_b],
+      linear_optimizer=tf.keras.optimizers.Ftrl(...),
+      # deep settings
+      dnn_feature_columns=[
+          categorical_feature_a_emb, categorical_feature_b_emb,
+          numeric_feature],
+      dnn_hidden_units=[1000, 500, 100],
+      dnn_optimizer=tf.keras.optimizers.Adagrad(...),
+      # warm-start settings
+      warm_start_from="/path/to/checkpoint/dir")
+
+  # To apply L1 and L2 regularization, you can set dnn_optimizer to:
+  tf.compat.v1.train.ProximalAdagradOptimizer(
+      learning_rate=0.1,
+      l1_regularization_strength=0.001,
+      l2_regularization_strength=0.001)
+  # To apply learning rate decay, you can set dnn_optimizer to a callable:
+  lambda: tf.keras.optimizers.Adam(
+      learning_rate=tf.compat.v1.train.exponential_decay(
+          learning_rate=0.1,
+          global_step=tf.compat.v1.train.get_global_step(),
+          decay_steps=10000,
+          decay_rate=0.96))
+  # The same applies to linear_optimizer.
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label values.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label values.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train, steps=100)
+  metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError`:
+
+  * for each `column` in `dnn_feature_columns` + `linear_feature_columns`:
+    - if `column` is a `CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
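+
+  For example, a `features` dict satisfying the above could look like the
+  following sketch (the column names 'age' and 'city' are assumptions):
+
+  ```python
+  features = {
+      'age': tf.constant([[18.]]),  # DenseColumn -> dense Tensor
+      'city': tf.sparse.SparseTensor(  # CategoricalColumn -> SparseTensor
+          indices=[[0, 0]], values=['Palo Alto'], dense_shape=[1, 1]),
+  }
+  ```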
+
+  Loss is calculated by using mean squared error.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               model_dir=None,
+               linear_feature_columns=None,
+               linear_optimizer='Ftrl',
+               dnn_feature_columns=None,
+               dnn_optimizer='Adagrad',
+               dnn_hidden_units=None,
+               dnn_activation_fn=tf.nn.relu,
+               dnn_dropout=None,
+               label_dimension=1,
+               weight_column=None,
+               config=None,
+               warm_start_from=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               batch_norm=False,
+               linear_sparse_combiner='sum'):
+    """Initializes a DNNLinearCombinedRegressor instance.
+
+    Args:
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        continue training a previously saved model.
+      linear_feature_columns: An iterable containing all the feature columns
+        used by linear part of the model. All items in the set must be instances
+        of classes derived from `FeatureColumn`.
+      linear_optimizer: An instance of `tf.keras.optimizers.*` used to apply
+        gradients to the linear part of the model. Can also be a string (one of
+        'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or callable. Defaults to
+        FTRL optimizer.
+      dnn_feature_columns: An iterable containing all the feature columns used
+        by deep part of the model. All items in the set must be instances of
+        classes derived from `FeatureColumn`.
+      dnn_optimizer: An instance of `tf.keras.optimizers.*` used to apply
+        gradients to the deep part of the model. Can also be a string (one of
+        'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or callable. Defaults to
+        Adagrad optimizer.
+      dnn_hidden_units: List of hidden units per layer. All layers are fully
+        connected.
+      dnn_activation_fn: Activation function applied to each layer. If None,
+        will use `tf.nn.relu`.
+      dnn_dropout: When not None, the probability we will drop out a given
+        coordinate.
+      label_dimension: Number of regression targets per example. This is the
+        size of the last dimension of the labels and logits `Tensor` objects
+        (typically, these have shape `[batch_size, label_dimension]`).
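+        For example, with `label_dimension=2`, labels and logits both have
+        shape `[batch_size, 2]` (a sketch: two regression targets, such as
+        latitude and longitude, per example).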
+      weight_column: A string or a `NumericColumn` created by
+        `tf.feature_column.numeric_column` defining feature column representing
+        weights. It is used to down weight or boost examples during training. It
+        will be multiplied by the loss of the example. If it is a string, it is
+        used as a key to fetch weight tensor from the `features`. If it is a
+        `NumericColumn`, raw tensor is fetched by key `weight_column.key`, then
+        `weight_column.normalizer_fn` is applied on it to get the weight tensor.
+      config: RunConfig object to configure the runtime settings.
+      warm_start_from: A string filepath to a checkpoint to warm-start from, or
+        a `WarmStartSettings` object to fully configure warm-starting.  If the
+        string filepath is provided instead of a `WarmStartSettings`, then all
+        weights are warm-started, and it is assumed that vocabularies and Tensor
+        names are unchanged.
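+        As a sketch (the checkpoint path and regex below are illustrative):
+        `tf.estimator.WarmStartSettings(
+        ckpt_to_initialize_from='/tmp/prev_model',
+        vars_to_warm_start='.*dnn.*')` warm-starts only variables whose names
+        match the DNN part of the model.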
+      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+        to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+      batch_norm: Whether to use batch normalization after each hidden layer.
+      linear_sparse_combiner: A string specifying how to reduce the linear model
+        if a categorical column is multivalent.  One of "mean", "sqrtn", and
+        "sum" -- these are effectively different ways to do example-level
+        normalization, which can be useful for bag-of-words features.  For more
+        details, see `tf.feature_column.linear_model`.
+
+    Raises:
+      ValueError: If both linear_feature_columns and dnn_feature_columns are
+        empty at the same time.
+    """
+    self._feature_columns = _validate_feature_columns(
+        linear_feature_columns=linear_feature_columns,
+        dnn_feature_columns=dnn_feature_columns)
+
+    head = regression_head.RegressionHead(
+        label_dimension=label_dimension,
+        weight_column=weight_column,
+        loss_reduction=loss_reduction)
+    estimator._canned_estimator_api_gauge.get_cell('Regressor').set(
+        'DNNLinearCombined')  # pylint: disable=protected-access
+
+    def _model_fn(features, labels, mode, config):
+      """Call the _dnn_linear_combined_model_fn."""
+      return _dnn_linear_combined_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          linear_feature_columns=linear_feature_columns,
+          linear_optimizer=linear_optimizer,
+          dnn_feature_columns=dnn_feature_columns,
+          dnn_optimizer=dnn_optimizer,
+          dnn_hidden_units=dnn_hidden_units,
+          dnn_activation_fn=dnn_activation_fn,
+          dnn_dropout=dnn_dropout,
+          config=config,
+          batch_norm=batch_norm,
+          linear_sparse_combiner=linear_sparse_combiner)
+
+    super(DNNLinearCombinedRegressorV2, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
+
+
+@estimator_export(v1=['estimator.DNNLinearCombinedRegressor'])  # pylint: disable=missing-docstring
+class DNNLinearCombinedRegressor(estimator.Estimator):
+  __doc__ = DNNLinearCombinedRegressorV2.__doc__.replace(
+      'SUM_OVER_BATCH_SIZE', 'SUM')
+
+  def __init__(self,
+               model_dir=None,
+               linear_feature_columns=None,
+               linear_optimizer='Ftrl',
+               dnn_feature_columns=None,
+               dnn_optimizer='Adagrad',
+               dnn_hidden_units=None,
+               dnn_activation_fn=tf.nn.relu,
+               dnn_dropout=None,
+               label_dimension=1,
+               weight_column=None,
+               input_layer_partitioner=None,
+               config=None,
+               warm_start_from=None,
+               loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+               batch_norm=False,
+               linear_sparse_combiner='sum'):
+    self._feature_columns = _validate_feature_columns(
+        linear_feature_columns=linear_feature_columns,
+        dnn_feature_columns=dnn_feature_columns)
+    estimator._canned_estimator_api_gauge.get_cell('Regressor').set(
+        'DNNLinearCombined')  # pylint: disable=protected-access
+
+    head = head_lib._regression_head(  # pylint: disable=protected-access
+        label_dimension=label_dimension,
+        weight_column=weight_column,
+        loss_reduction=loss_reduction)
+
+    def _model_fn(features, labels, mode, config):
+      """Call the _dnn_linear_combined_model_fn."""
+      return _dnn_linear_combined_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          linear_feature_columns=linear_feature_columns,
+          linear_optimizer=linear_optimizer,
+          dnn_feature_columns=dnn_feature_columns,
+          dnn_optimizer=dnn_optimizer,
+          dnn_hidden_units=dnn_hidden_units,
+          dnn_activation_fn=dnn_activation_fn,
+          dnn_dropout=dnn_dropout,
+          input_layer_partitioner=input_layer_partitioner,
+          config=config,
+          batch_norm=batch_norm,
+          linear_sparse_combiner=linear_sparse_combiner)
+
+    super(DNNLinearCombinedRegressor, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn_testing_utils.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn_testing_utils.py
new file mode 100644
index 00000000..64440ce6
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/dnn_testing_utils.py
@@ -0,0 +1,2141 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Utils to be used in testing DNN estimators."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+import shutil
+import tempfile
+
+import numpy as np
+import six
+import tensorflow as tf
+from tensorflow.core.framework import summary_pb2
+from tensorflow.python.feature_column import feature_column_v2
+from tensorflow.python.framework import ops
+from tensorflow.python.keras.optimizer_v2 import gradient_descent
+from tensorflow.python.keras.optimizer_v2 import optimizer_v2
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator import model_fn
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.canned import prediction_keys
+from tensorflow_estimator.python.estimator.head import base_head
+from tensorflow_estimator.python.estimator.inputs import numpy_io
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+# pylint rules which are disabled by default for test files.
+# pylint: disable=invalid-name,protected-access,missing-docstring
+
+# Names of variables created by model.
+LEARNING_RATE_NAME = 'dnn/regression_head/dnn/learning_rate'
+HIDDEN_WEIGHTS_NAME_PATTERN = 'dnn/hiddenlayer_%d/kernel'
+HIDDEN_BIASES_NAME_PATTERN = 'dnn/hiddenlayer_%d/bias'
+BATCH_NORM_BETA_NAME_PATTERN = 'dnn/hiddenlayer_%d/batchnorm_%d/beta'
+BATCH_NORM_GAMMA_NAME_PATTERN = 'dnn/hiddenlayer_%d/batchnorm_%d/gamma'
+BATCH_NORM_MEAN_NAME_PATTERN = 'dnn/hiddenlayer_%d/batchnorm_%d/moving_mean'
+BATCH_NORM_VARIANCE_NAME_PATTERN = (
+    'dnn/hiddenlayer_%d/batchnorm_%d/moving_variance')
+LOGITS_WEIGHTS_NAME = 'dnn/logits/kernel'
+LOGITS_BIASES_NAME = 'dnn/logits/bias'
+OCCUPATION_EMBEDDING_NAME = ('dnn/input_from_feature_columns/input_layer/'
+                             'occupation_embedding/embedding_weights')
+CITY_EMBEDDING_NAME = ('dnn/input_from_feature_columns/input_layer/'
+                       'city_embedding/embedding_weights')
+
+
+def assert_close(expected, actual, rtol=1e-04, message='', name='assert_close'):
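+  """Returns an op asserting `actual` is within `rtol` of `expected`."""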
+  with ops.name_scope(name, 'assert_close', (expected, actual, rtol)) as scope:
+    expected = ops.convert_to_tensor(expected, name='expected')
+    actual = ops.convert_to_tensor(actual, name='actual')
+    rdiff = tf.math.abs((expected - actual) / expected, 'diff')
+    rtol = ops.convert_to_tensor(rtol, name='rtol')
+    return tf.compat.v1.debugging.assert_less(
+        rdiff,
+        rtol,
+        data=('Condition expected =~ actual did not hold element-wise: '
+              'expected = ', expected, 'actual = ', actual, 'rdiff = ', rdiff,
+              'rtol = ', rtol,),
+        summarize=expected.get_shape().num_elements(),
+        name=scope)
+
+
+def create_checkpoint(weights_and_biases,
+                      global_step,
+                      model_dir,
+                      batch_norm_vars=None):
+  """Create checkpoint file with provided model weights.
+
+  Args:
+    weights_and_biases: Iterable of tuples of weight and bias values.
+    global_step: Initial global step to save in checkpoint.
+    model_dir: Directory into which checkpoint is saved.
+    batch_norm_vars: Variables used for batch normalization.
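+
+  Example (a sketch; the weight values and directory are illustrative):
+    create_checkpoint(
+        weights_and_biases=(([[.6, .5]], [.1, -.1]), ([[-1.], [1.]], [.3])),
+        global_step=100,
+        model_dir='/tmp/test_model')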
+  """
+  weights, biases = zip(*weights_and_biases)
+  if batch_norm_vars:
+    assert len(batch_norm_vars) == len(weights_and_biases) - 1
+    (bn_betas, bn_gammas, bn_means, bn_variances) = zip(*batch_norm_vars)
+  model_weights = {}
+
+  # Hidden layer weights.
+  for i in range(0, len(weights) - 1):
+    model_weights[HIDDEN_WEIGHTS_NAME_PATTERN % i] = weights[i]
+    model_weights[HIDDEN_BIASES_NAME_PATTERN % i] = biases[i]
+    if batch_norm_vars:
+      model_weights[BATCH_NORM_BETA_NAME_PATTERN % (i, i)] = bn_betas[i]
+      model_weights[BATCH_NORM_GAMMA_NAME_PATTERN % (i, i)] = bn_gammas[i]
+      model_weights[BATCH_NORM_MEAN_NAME_PATTERN % (i, i)] = bn_means[i]
+      model_weights[BATCH_NORM_VARIANCE_NAME_PATTERN % (i, i)] = bn_variances[i]
+
+  # Output layer weights.
+  model_weights[LOGITS_WEIGHTS_NAME] = weights[-1]
+  model_weights[LOGITS_BIASES_NAME] = biases[-1]
+
+  with tf.Graph().as_default():
+    # Create model variables.
+    for k, v in six.iteritems(model_weights):
+      tf.Variable(v, name=k, dtype=tf.dtypes.float32)
+
+    # Create non-model variables.
+    global_step_var = tf.compat.v1.train.create_global_step()
+
+    # Initialize vars and save checkpoint.
+    with tf.compat.v1.Session() as sess:
+      tf.compat.v1.initializers.global_variables().run()
+      global_step_var.assign(global_step).eval()
+      tf.compat.v1.train.Saver().save(sess,
+                                      os.path.join(model_dir, 'model.ckpt'))
+
+
+def mock_head(testcase, hidden_units, logits_dimension, expected_logits):
+  """Returns a mock head that validates logits values and variable names."""
+  hidden_weights_names = [(HIDDEN_WEIGHTS_NAME_PATTERN + ':0') % i
+                          for i in range(len(hidden_units))]
+  hidden_biases_names = [
+      (HIDDEN_BIASES_NAME_PATTERN + ':0') % i for i in range(len(hidden_units))
+  ]
+  expected_var_names = (
+      hidden_weights_names + hidden_biases_names +
+      [LOGITS_WEIGHTS_NAME + ':0', LOGITS_BIASES_NAME + ':0'])
+
+  def _create_tpu_estimator_spec(features,
+                                 mode,
+                                 logits,
+                                 labels,
+                                 trainable_variables=None,
+                                 train_op_fn=None,
+                                 optimizer=None,
+                                 update_ops=None):
+    del features, labels  # Not used.
+    trainable_vars = tf.compat.v1.get_collection(
+        tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)
+    testcase.assertItemsEqual(expected_var_names,
+                              [var.name for var in trainable_vars])
+    loss = tf.constant(1.)
+    assert_logits = assert_close(
+        expected_logits, logits, message='Failed for mode={}. '.format(mode))
+    with tf.control_dependencies([assert_logits]):
+      if mode == ModeKeys.TRAIN:
+        if train_op_fn is not None:
+          train_op = train_op_fn(loss)
+        elif optimizer is not None:
+          train_op = optimizer.get_updates(loss, trainable_variables)
+        if update_ops is not None:
+          train_op = tf.group(train_op, *update_ops)
+        return model_fn._TPUEstimatorSpec(
+            mode=mode, loss=loss, train_op=train_op)
+      elif mode == ModeKeys.EVAL:
+        return model_fn._TPUEstimatorSpec(mode=mode, loss=tf.identity(loss))
+      elif mode == ModeKeys.PREDICT:
+        return model_fn._TPUEstimatorSpec(
+            mode=mode, predictions={'logits': tf.identity(logits)})
+      else:
+        testcase.fail('Invalid mode: {}'.format(mode))
+
+  def _create_estimator_spec(features,
+                             mode,
+                             logits,
+                             labels,
+                             trainable_variables=None,
+                             train_op_fn=None,
+                             optimizer=None,
+                             update_ops=None):
+    tpu_spec = _create_tpu_estimator_spec(features, mode, logits, labels,
+                                          trainable_variables, train_op_fn,
+                                          optimizer, update_ops)
+    return tpu_spec.as_estimator_spec()
+
+  head = tf.compat.v1.test.mock.NonCallableMagicMock(spec=base_head.Head)
+  head.logits_dimension = logits_dimension
+  head._create_tpu_estimator_spec = tf.compat.v1.test.mock.MagicMock(
+      wraps=_create_tpu_estimator_spec)
+  head.create_estimator_spec = tf.compat.v1.test.mock.MagicMock(
+      wraps=_create_estimator_spec)
+
+  return head
+
+
+def mock_optimizer(testcase, hidden_units, expected_loss=None):
+  """Creates a mock optimizer to test the train method.
+
+  Args:
+    testcase: A TestCase instance.
+    hidden_units: Iterable of integer sizes for the hidden layers.
+    expected_loss: If given, will assert the loss value.
+
+  Returns:
+    A mock Optimizer.
+  """
+  hidden_weights_names = [(HIDDEN_WEIGHTS_NAME_PATTERN + ':0') % i
+                          for i in range(len(hidden_units))]
+  hidden_biases_names = [
+      (HIDDEN_BIASES_NAME_PATTERN + ':0') % i for i in range(len(hidden_units))
+  ]
+  expected_var_names = (
+      hidden_weights_names + hidden_biases_names +
+      [LOGITS_WEIGHTS_NAME + ':0', LOGITS_BIASES_NAME + ':0'])
+
+  class _Optimizer(optimizer_v2.OptimizerV2):
+
+    def get_updates(self, loss, params):
+      trainable_vars = params
+      testcase.assertItemsEqual(expected_var_names,
+                                [var.name for var in trainable_vars])
+
+      # Verify loss. We can't check the value directly, so we add an assert op.
+      testcase.assertEqual(0, loss.shape.ndims)
+      if expected_loss is None:
+        if self.iterations is not None:
+          return [self.iterations.assign_add(1).op]
+        return [tf.no_op()]
+      assert_loss = assert_close(
+          tf.cast(expected_loss, name='expected', dtype=tf.dtypes.float32),
+          loss,
+          name='assert_loss')
+      with tf.control_dependencies((assert_loss,)):
+        if self.iterations is not None:
+          return [self.iterations.assign_add(1).op]
+        return [tf.no_op()]
+
+    def get_config(self):
+      config = super(_Optimizer, self).get_config()
+      return config
+
+  optimizer = _Optimizer(name='my_optimizer')
+
+  return optimizer
+
+
+class BaseDNNModelFnTest(object):
+  """Tests that _dnn_model_fn passes expected logits to mock head."""
+
+  def __init__(self, dnn_model_fn, fc_impl=feature_column_v2):
+    self._dnn_model_fn = dnn_model_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def _test_logits(self, mode, hidden_units, logits_dimension, inputs,
+                   expected_logits):
+    """Tests that the expected logits are passed to mock head."""
+    with tf.Graph().as_default():
+      tf.compat.v1.train.create_global_step()
+      head = mock_head(
+          self,
+          hidden_units=hidden_units,
+          logits_dimension=logits_dimension,
+          expected_logits=expected_logits)
+      estimator_spec = self._dnn_model_fn(
+          features={'age': tf.constant(inputs)},
+          labels=tf.constant([[1]]),
+          mode=mode,
+          head=head,
+          hidden_units=hidden_units,
+          feature_columns=[
+              self._fc_impl.numeric_column(
+                  'age', shape=np.array(inputs).shape[1:])
+          ],
+          optimizer=mock_optimizer(self, hidden_units))
+      with tf.compat.v1.train.MonitoredTrainingSession(
+          checkpoint_dir=self._model_dir) as sess:
+        if mode == ModeKeys.TRAIN:
+          sess.run(estimator_spec.train_op)
+        elif mode == ModeKeys.EVAL:
+          sess.run(estimator_spec.loss)
+        elif mode == ModeKeys.PREDICT:
+          sess.run(estimator_spec.predictions)
+        else:
+          self.fail('Invalid mode: {}'.format(mode))
+
+  def test_one_dim_logits(self):
+    """Tests one-dimensional logits.
+
+    input_layer = [[10]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)]] = [[6.1, 4.9]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)]]
+                   = [[relu(2.38), relu(-0.22)]] = [[2.38, 0]]
+    logits = [[-1*2.38 +1*0 +0.3]] = [[-2.08]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=1,
+          inputs=[[10.]],
+          expected_logits=[[-2.08]])
+
+  def test_multi_dim_logits(self):
+    """Tests multi-dimensional logits.
+
+    input_layer = [[10]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)]] = [[6.1, 4.9]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)]]
+                   = [[relu(2.38), relu(-0.22)]] = [[2.38, 0]]
+    logits = [[-1*2.38 +0.3, 1*2.38 -0.3, 0.5*2.38]]
+           = [[-2.08, 2.08, 1.19]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10.]],
+          expected_logits=[[-2.08, 2.08, 1.19]])
+
+  def test_multi_example_multi_dim_logits(self):
+    """Tests multiple examples and multi-dimensional logits.
+
+    input_layer = [[10], [5]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)],
+                      [relu(0.6*5 +0.1), relu(0.5*5 -0.1)]]
+                   = [[6.1, 4.9], [3.1, 2.4]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)],
+                      [relu(1*3.1 -0.8*2.4 +0.2), relu(0.8*3.1 -1*2.4 -0.2)]]
+                   = [[2.38, 0], [1.38, 0]]
+    logits = [[-1*2.38 +0.3, 1*2.38 -0.3, 0.5*2.38],
+              [-1*1.38 +0.3, 1*1.38 -0.3, 0.5*1.38]]
+           = [[-2.08, 2.08, 1.19], [-1.08, 1.08, 0.69]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10.], [5.]],
+          expected_logits=[[-2.08, 2.08, 1.19], [-1.08, 1.08, .69]])
+
+  def test_multi_dim_input_one_dim_logits(self):
+    """Tests multi-dimensional inputs and one-dimensional logits.
+
+    input_layer = [[10, 8]]
+    hidden_layer_0 = [[relu(0.6*10 -0.6*8 +0.1), relu(0.5*10 -0.5*8 -0.1)]]
+                   = [[1.3, 0.9]]
+    hidden_layer_1 = [[relu(1*1.3 -0.8*0.9 + 0.2), relu(0.8*1.3 -1*0.9 -0.2)]]
+                   = [[0.78, relu(-0.06)]] = [[0.78, 0]]
+    logits = [[-1*0.78 +1*0 +0.3]] = [[-0.48]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=1,
+          inputs=[[10., 8.]],
+          expected_logits=[[-0.48]])
+
+  def test_multi_dim_input_multi_dim_logits(self):
+    """Tests multi-dimensional inputs and multi-dimensional logits.
+
+    input_layer = [[10, 8]]
+    hidden_layer_0 = [[relu(0.6*10 -0.6*8 +0.1), relu(0.5*10 -0.5*8 -0.1)]]
+                   = [[1.3, 0.9]]
+    hidden_layer_1 = [[relu(1*1.3 -0.8*0.9 + 0.2), relu(0.8*1.3 -1*0.9 -0.2)]]
+                   = [[0.78, relu(-0.06)]] = [[0.78, 0]]
+    logits = [[-1*0.78 + 0.3, 1*0.78 -0.3, 0.5*0.78]] = [[-0.48, 0.48, 0.39]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10., 8.]],
+          expected_logits=[[-0.48, 0.48, 0.39]])
+
+  def test_multi_feature_column_multi_dim_logits(self):
+    """Tests multiple feature columns and multi-dimensional logits.
+
+    All numbers are the same as test_multi_dim_input_multi_dim_logits. The only
+    difference is that the input consists of two 1D feature columns, instead of
+    one 2D feature column.
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    hidden_units = (2, 2)
+    logits_dimension = 3
+    inputs = ([[10.]], [[8.]])
+    expected_logits = [[-0.48, 0.48, 0.39]]
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      with tf.Graph().as_default():
+        tf.compat.v1.train.create_global_step()
+        head = mock_head(
+            self,
+            hidden_units=hidden_units,
+            logits_dimension=logits_dimension,
+            expected_logits=expected_logits)
+        estimator_spec = self._dnn_model_fn(
+            features={
+                'age': tf.constant(inputs[0]),
+                'height': tf.constant(inputs[1])
+            },
+            labels=tf.constant([[1]]),
+            mode=mode,
+            head=head,
+            hidden_units=hidden_units,
+            feature_columns=[
+                self._fc_impl.numeric_column('age'),
+                self._fc_impl.numeric_column('height')
+            ],
+            optimizer=mock_optimizer(self, hidden_units))
+        with tf.compat.v1.train.MonitoredTrainingSession(
+            checkpoint_dir=self._model_dir) as sess:
+          if mode == ModeKeys.TRAIN:
+            sess.run(estimator_spec.train_op)
+          elif mode == ModeKeys.EVAL:
+            sess.run(estimator_spec.loss)
+          elif mode == ModeKeys.PREDICT:
+            sess.run(estimator_spec.predictions)
+          else:
+            self.fail('Invalid mode: {}'.format(mode))
+
+  def test_multi_feature_column_mix_multi_dim_logits(self):
+    """Tests multiple feature columns and multi-dimensional logits.
+
+    All numbers are the same as test_multi_dim_input_multi_dim_logits. The only
+    difference is that the input consists of two 1D feature columns, instead of
+    one 2D feature column.
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    hidden_units = (2, 2)
+    logits_dimension = 3
+    inputs = ([[10.]], [[8.]])
+    expected_logits = [[-0.48, 0.48, 0.39]]
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      with tf.Graph().as_default():
+        tf.compat.v1.train.create_global_step()
+        head = mock_head(
+            self,
+            hidden_units=hidden_units,
+            logits_dimension=logits_dimension,
+            expected_logits=expected_logits)
+        estimator_spec = self._dnn_model_fn(
+            features={
+                'age': tf.constant(inputs[0]),
+                'height': tf.constant(inputs[1])
+            },
+            labels=tf.constant([[1]]),
+            mode=mode,
+            head=head,
+            hidden_units=hidden_units,
+            feature_columns=[
+                tf.feature_column.numeric_column('age'),
+                tf.feature_column.numeric_column('height')
+            ],
+            optimizer=mock_optimizer(self, hidden_units))
+        with tf.compat.v1.train.MonitoredTrainingSession(
+            checkpoint_dir=self._model_dir) as sess:
+          if mode == ModeKeys.TRAIN:
+            sess.run(estimator_spec.train_op)
+          elif mode == ModeKeys.EVAL:
+            sess.run(estimator_spec.loss)
+          elif mode == ModeKeys.PREDICT:
+            sess.run(estimator_spec.predictions)
+          else:
+            self.fail('Invalid mode: {}'.format(mode))
+
+  def test_features_tensor_raises_value_error(self):
+    """Tests that passing a Tensor for features raises a ValueError."""
+    hidden_units = (2, 2)
+    logits_dimension = 3
+    inputs = ([[10.]], [[8.]])
+    expected_logits = [[0, 0, 0]]
+
+    with tf.Graph().as_default():
+      tf.compat.v1.train.create_global_step()
+      head = mock_head(
+          self,
+          hidden_units=hidden_units,
+          logits_dimension=logits_dimension,
+          expected_logits=expected_logits)
+      with self.assertRaisesRegexp(ValueError, 'features should be a dict'):
+        self._dnn_model_fn(
+            features=tf.constant(inputs),
+            labels=tf.constant([[1]]),
+            mode=ModeKeys.TRAIN,
+            head=head,
+            hidden_units=hidden_units,
+            feature_columns=[
+                self._fc_impl.numeric_column(
+                    'age', shape=np.array(inputs).shape[1:])
+            ],
+            optimizer=mock_optimizer(self, hidden_units))
+
+
+class BaseDNNLogitFnTest(object):
+  """Tests correctness of logits calculated from _dnn_logit_fn_builder."""
+
+  def __init__(self, dnn_logit_fn_builder, fc_impl=feature_column_v2):
+    self._dnn_logit_fn_builder = dnn_logit_fn_builder
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def _test_logits(self,
+                   mode,
+                   hidden_units,
+                   logits_dimension,
+                   inputs,
+                   expected_logits,
+                   batch_norm=False):
+    """Tests that the expected logits are calculated."""
+    with tf.Graph().as_default():
+      # Global step needed for MonitoredSession, which is in turn used to
+      # explicitly set variable weights through a checkpoint.
+      tf.compat.v1.train.create_global_step()
+      logit_fn = self._dnn_logit_fn_builder(
+          units=logits_dimension,
+          hidden_units=hidden_units,
+          feature_columns=[
+              self._fc_impl.numeric_column(
+                  'age', shape=np.array(inputs).shape[1:])
+          ],
+          activation_fn=tf.nn.relu,
+          dropout=None,
+          batch_norm=batch_norm)
+      logits = logit_fn(features={'age': tf.constant(inputs)}, mode=mode)
+      with tf.compat.v1.train.MonitoredTrainingSession(
+          checkpoint_dir=self._model_dir) as sess:
+        self.assertAllClose(expected_logits, sess.run(logits))
+
+  def test_one_dim_logits(self):
+    """Tests one-dimensional logits.
+
+    input_layer = [[10]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)]] = [[6.1, 4.9]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)]]
+                   = [[relu(2.38), relu(-0.22)]] = [[2.38, 0]]
+    logits = [[-1*2.38 +1*0 +0.3]] = [[-2.08]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=1,
+          inputs=[[10.]],
+          expected_logits=[[-2.08]])
+
+  def test_one_dim_logits_with_batch_norm(self):
+    """Tests one-dimensional logits.
+
+    input_layer = [[10]]
+    hidden_layer_0 = [[relu(0.6*10 +1), relu(0.5*10 -1)]] = [[7, 4]]
+    hidden_layer_0 = [[relu(0.6*20 +1), relu(0.5*20 -1)]] = [[13, 9]]
+
+    batch_norm_0, training (epsilon = 0.001):
+      mean1 = 1/2*(7+13) = 10,
+      variance1 = 1/2*(3^2+3^2) = 9
+      x11 = (7-10)/sqrt(9+0.001) = -0.999944449,
+      x21 = (13-10)/sqrt(9+0.001) = 0.999944449,
+
+      mean2 = 1/2*(4+9) = 6.5,
+      variance2 = 1/2*(2.5^2+.2.5^2) = 6.25
+      x12 = (4-6.5)/sqrt(6.25+0.001) = -0.99992001,
+      x22 = (9-6.5)/sqrt(6.25+0.001) = 0.99992001,
+
+    logits = [[-1*(-0.999944449) + 2*(-0.99992001) + 0.3],
+              [-1*0.999944449 + 2*0.99992001 + 0.3]]
+           = [[-0.699895571],[1.299895571]]
+
+    batch_norm_0, not training (epsilon = 0.001):
+      moving_mean1 = 0, moving_variance1 = 1
+      x11 = (7-0)/sqrt(1+0.001) = 6.996502623,
+      x21 = (13-0)/sqrt(1+0.001) = 12.993504871,
+      moving_mean2 = 0, moving_variance2 = 1
+      x12 = (4-0)/sqrt(1+0.001) = 3.998001499,
+      x22 = (9-0)/sqrt(1+0.001) = 8.995503372,
+
+    logits = [[-1*6.996502623 + 2*3.998001499 + 0.3],
+              [-1*12.993504871 + 2*8.995503372 + 0.3]]
+           = [[1.299500375],[5.297501873]]
+    """
+    base_global_step = 100
+    create_checkpoint(
+        (
+            ([[.6, .5]], [1., -1.]),
+            ([[-1.], [2.]], [.3]),
+        ),
+        base_global_step,
+        self._model_dir,
+        batch_norm_vars=(
+            [
+                [0, 0],  # beta.
+                [1, 1],  # gamma.
+                [0, 0],  # moving mean.
+                [1, 1],  # moving variance.
+            ],))
+    self._test_logits(
+        ModeKeys.TRAIN,
+        hidden_units=[2],
+        logits_dimension=1,
+        inputs=[[10.], [20.]],
+        expected_logits=[[-0.699895571], [1.299895571]],
+        batch_norm=True)
+    for mode in [ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=[2],
+          logits_dimension=1,
+          inputs=[[10.], [20.]],
+          expected_logits=[[1.299500375], [5.297501873]],
+          batch_norm=True)
+
+  def test_multi_dim_logits(self):
+    """Tests multi-dimensional logits.
+
+    input_layer = [[10]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)]] = [[6.1, 4.9]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)]]
+                   = [[relu(2.38), relu(-0.22)]] = [[2.38, 0]]
+    logits = [[-1*2.38 +0.3, 1*2.38 -0.3, 0.5*2.38]]
+           = [[-2.08, 2.08, 1.19]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10.]],
+          expected_logits=[[-2.08, 2.08, 1.19]])
+
+  def test_multi_example_multi_dim_logits(self):
+    """Tests multiple examples and multi-dimensional logits.
+
+    input_layer = [[10], [5]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)],
+                      [relu(0.6*5 +0.1), relu(0.5*5 -0.1)]]
+                   = [[6.1, 4.9], [3.1, 2.4]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)],
+                      [relu(1*3.1 -0.8*2.4 +0.2), relu(0.8*3.1 -1*2.4 -0.2)]]
+                   = [[2.38, 0], [1.38, 0]]
+    logits = [[-1*2.38 +0.3, 1*2.38 -0.3, 0.5*2.38],
+              [-1*1.38 +0.3, 1*1.38 -0.3, 0.5*1.38]]
+           = [[-2.08, 2.08, 1.19], [-1.08, 1.08, 0.69]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10.], [5.]],
+          expected_logits=[[-2.08, 2.08, 1.19], [-1.08, 1.08, .69]])
+
+  def test_multi_dim_input_one_dim_logits(self):
+    """Tests multi-dimensional inputs and one-dimensional logits.
+
+    input_layer = [[10, 8]]
+    hidden_layer_0 = [[relu(0.6*10 -0.6*8 +0.1), relu(0.5*10 -0.5*8 -0.1)]]
+                   = [[1.3, 0.9]]
+    hidden_layer_1 = [[relu(1*1.3 -0.8*0.9 + 0.2), relu(0.8*1.3 -1*0.9 -0.2)]]
+                   = [[0.78, relu(-0.06)]] = [[0.78, 0]]
+    logits = [[-1*0.78 +1*0 +0.3]] = [[-0.48]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=1,
+          inputs=[[10., 8.]],
+          expected_logits=[[-0.48]])
+
+  def test_multi_dim_input_multi_dim_logits(self):
+    """Tests multi-dimensional inputs and multi-dimensional logits.
+
+    input_layer = [[10, 8]]
+    hidden_layer_0 = [[relu(0.6*10 -0.6*8 +0.1), relu(0.5*10 -0.5*8 -0.1)]]
+                   = [[1.3, 0.9]]
+    hidden_layer_1 = [[relu(1*1.3 -0.8*0.9 + 0.2), relu(0.8*1.3 -1*0.9 -0.2)]]
+                   = [[0.78, relu(-0.06)]] = [[0.78, 0]]
+    logits = [[-1*0.78 + 0.3, 1*0.78 -0.3, 0.5*0.78]] = [[-0.48, 0.48, 0.39]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10., 8.]],
+          expected_logits=[[-0.48, 0.48, 0.39]])
+
+  def test_multi_feature_column_multi_dim_logits(self):
+    """Tests multiple feature columns and multi-dimensional logits.
+
+    All numbers are the same as test_multi_dim_input_multi_dim_logits. The only
+    difference is that the input consists of two 1D feature columns, instead of
+    one 2D feature column.
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    hidden_units = (2, 2)
+    logits_dimension = 3
+    inputs = ([[10.]], [[8.]])
+    expected_logits = [[-0.48, 0.48, 0.39]]
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      with tf.Graph().as_default():
+        # Global step needed for MonitoredSession, which is in turn used to
+        # explicitly set variable weights through a checkpoint.
+        tf.compat.v1.train.create_global_step()
+        logit_fn = self._dnn_logit_fn_builder(
+            units=logits_dimension,
+            hidden_units=hidden_units,
+            feature_columns=[
+                self._fc_impl.numeric_column('age'),
+                self._fc_impl.numeric_column('height')
+            ],
+            activation_fn=tf.nn.relu,
+            dropout=None,
+            batch_norm=False)
+        logits = logit_fn(
+            features={
+                'age': tf.constant(inputs[0]),
+                'height': tf.constant(inputs[1])
+            },
+            mode=mode)
+        with tf.compat.v1.train.MonitoredTrainingSession(
+            checkpoint_dir=self._model_dir) as sess:
+          self.assertAllClose(expected_logits, sess.run(logits))
+
+  def test_multi_feature_column_mix_multi_dim_logits(self):
+    """Tests multiple feature columns and multi-dimensional logits.
+
+    All numbers are the same as test_multi_dim_input_multi_dim_logits. The only
+    difference is that the input consists of two 1D feature columns, instead of
+    one 2D feature column.
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    hidden_units = (2, 2)
+    logits_dimension = 3
+    inputs = ([[10.]], [[8.]])
+    expected_logits = [[-0.48, 0.48, 0.39]]
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      with tf.Graph().as_default():
+        # Global step needed for MonitoredSession, which is in turn used to
+        # explicitly set variable weights through a checkpoint.
+        tf.compat.v1.train.create_global_step()
+        logit_fn = self._dnn_logit_fn_builder(
+            units=logits_dimension,
+            hidden_units=hidden_units,
+            feature_columns=[
+                tf.feature_column.numeric_column('age'),
+                tf.feature_column.numeric_column('height')
+            ],
+            activation_fn=tf.nn.relu,
+            dropout=None,
+            batch_norm=False)
+        logits = logit_fn(
+            features={
+                'age': tf.constant(inputs[0]),
+                'height': tf.constant(inputs[1])
+            },
+            mode=mode)
+        with tf.compat.v1.train.MonitoredTrainingSession(
+            checkpoint_dir=self._model_dir) as sess:
+          self.assertAllClose(expected_logits, sess.run(logits))
+
+
+class BaseDNNWarmStartingTest(object):
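+  """Tests warm-starting behavior of DNN classifiers and regressors."""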
+
+  def __init__(self,
+               _dnn_classifier_fn,
+               _dnn_regressor_fn,
+               fc_impl=feature_column_v2):
+    self._dnn_classifier_fn = _dnn_classifier_fn
+    self._dnn_regressor_fn = _dnn_regressor_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    # Create a directory to save our old checkpoint and vocabularies to.
+    self._ckpt_and_vocab_dir = tempfile.mkdtemp()
+    # Reset the default graph in each test method to avoid the Keras optimizer
+    # naming issue during warm starting.
+    tf.compat.v1.reset_default_graph()
+
+    # Make a dummy input_fn.
+    def _input_fn():
+      features = {
+          'city': [['Palo Alto'], ['Mountain View']],
+          'locality': [['Palo Alto'], ['Mountain View']],
+          'occupation': [['doctor'], ['consultant']]
+      }
+      return features, [0, 1]
+
+    self._input_fn = _input_fn
+
+  def tearDown(self):
+    # Clean up checkpoint / vocab dir.
+    tf.compat.v1.summary.FileWriterCache.clear()
+    shutil.rmtree(self._ckpt_and_vocab_dir)
+
+  def assertAllNotClose(self, t1, t2):
+    """Helper assert for arrays."""
+    sum_of_abs_diff = 0.0
+    for x, y in zip(t1, t2):
+      try:
+        for a, b in zip(x, y):
+          sum_of_abs_diff += abs(b - a)
+      except TypeError:
+        sum_of_abs_diff += abs(y - x)
+    self.assertGreater(sum_of_abs_diff, 0)
+
+  def test_classifier_basic_warm_starting(self):
+    """Tests correctness of DNNClassifier default warm-start."""
+    city = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_list(
+            'city', vocabulary_list=['Mountain View', 'Palo Alto']),
+        dimension=5)
+
+    # Create a DNNClassifier and train to save a checkpoint.
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second DNNClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        n_classes=4,
+        optimizer=gradient_descent.SGD(learning_rate=0.0),
+        warm_start_from=dnn_classifier.model_dir)
+
+    warm_started_dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_dnn_classifier.get_variable_names():
+      # Learning rate is also checkpointed in V2 optimizer. So we need to make
+      # sure it uses the new value after warm started.
+      if 'learning_rate' in variable_name:
+        self.assertAllClose(
+            0.0, warm_started_dnn_classifier.get_variable_value(variable_name))
+      else:
+        self.assertAllClose(
+            dnn_classifier.get_variable_value(variable_name),
+            warm_started_dnn_classifier.get_variable_value(variable_name))
+
+  def test_regressor_basic_warm_starting(self):
+    """Tests correctness of DNNRegressor default warm-start."""
+    city = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_list(
+            'city', vocabulary_list=['Mountain View', 'Palo Alto']),
+        dimension=5)
+
+    # Create a DNNRegressor and train to save a checkpoint.
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        model_dir=self._ckpt_and_vocab_dir,
+        optimizer='SGD')
+    dnn_regressor.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second DNNRegressor, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        optimizer=gradient_descent.SGD(learning_rate=0.0),
+        warm_start_from=dnn_regressor.model_dir)
+
+    warm_started_dnn_regressor.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_dnn_regressor.get_variable_names():
+      # The learning rate is also checkpointed in the V2 optimizer, so we need
+      # to make sure the new value is used after warm-starting.
+      if 'learning_rate' in variable_name:
+        self.assertAllClose(
+            0.0, warm_started_dnn_regressor.get_variable_value(variable_name))
+      else:
+        self.assertAllClose(
+            dnn_regressor.get_variable_value(variable_name),
+            warm_started_dnn_regressor.get_variable_value(variable_name))
+
+  def test_warm_starting_selective_variables(self):
+    """Tests selecting variables to warm-start."""
+    city = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_list(
+            'city', vocabulary_list=['Mountain View', 'Palo Alto']),
+        dimension=5)
+
+    # Create a DNNClassifier and train to save a checkpoint.
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second DNNClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        n_classes=4,
+        optimizer=gradient_descent.SGD(learning_rate=0.0),
+        # The provided regular expression will only warm-start the city
+        # embedding, not the kernels and biases of the hidden layers.
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=dnn_classifier.model_dir,
+            vars_to_warm_start='.*(city).*'))
+
+    warm_started_dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_dnn_classifier.get_variable_names():
+      if 'city' in variable_name:
+        self.assertAllClose(
+            dnn_classifier.get_variable_value(variable_name),
+            warm_started_dnn_classifier.get_variable_value(variable_name))
+      elif 'bias' in variable_name:
+        # Hidden layer biases are zero-initialized.
+        bias_values = warm_started_dnn_classifier.get_variable_value(
+            variable_name)
+        self.assertAllClose(np.zeros_like(bias_values), bias_values)
+      elif 'kernel' in variable_name:
+        # We can't override the glorot uniform initializer used for the kernels
+        # in the dense layers, so just make sure we're not getting the same
+        # values from the old checkpoint.
+        self.assertAllNotClose(
+            dnn_classifier.get_variable_value(variable_name),
+            warm_started_dnn_classifier.get_variable_value(variable_name))
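+
+  # A short sketch of the selective warm-start settings used above; only
+  # variables whose names match the regular expression are restored from the
+  # checkpoint ('/tmp/old_model' is hypothetical):
+  #
+  #   ws = tf.estimator.WarmStartSettings(
+  #       ckpt_to_initialize_from='/tmp/old_model',
+  #       vars_to_warm_start='.*city.*')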
+
+  def test_warm_starting_with_vocab_remapping(self):
+    """Tests warm-starting with vocab remapping."""
+    vocab_list = ['doctor', 'lawyer', 'consultant']
+    vocab_file = os.path.join(self._ckpt_and_vocab_dir, 'occupation_vocab')
+    with open(vocab_file, 'w') as f:
+      f.write('\n'.join(vocab_list))
+    occupation = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_file(
+            'occupation',
+            vocabulary_file=vocab_file,
+            vocabulary_size=len(vocab_list)),
+        dimension=2)
+
+    # Create a DNNClassifier and train to save a checkpoint.
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[occupation],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second DNNClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).  Use a new FeatureColumn with a
+    # different vocabulary for occupation.
+    new_vocab_list = ['doctor', 'consultant', 'engineer']
+    new_vocab_file = os.path.join(self._ckpt_and_vocab_dir,
+                                  'new_occupation_vocab')
+    with open(new_vocab_file, 'w') as f:
+      f.write('\n'.join(new_vocab_list))
+    new_occupation = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_file(
+            'occupation',
+            vocabulary_file=new_vocab_file,
+            vocabulary_size=len(new_vocab_list)),
+        dimension=2)
+    # We can create our VocabInfo object from the new and old occupation
+    # FeatureColumns.
+    occupation_vocab_info = estimator.VocabInfo(
+        new_vocab=new_occupation.categorical_column.vocabulary_file,
+        new_vocab_size=new_occupation.categorical_column.vocabulary_size,
+        num_oov_buckets=new_occupation.categorical_column.num_oov_buckets,
+        old_vocab=occupation.categorical_column.vocabulary_file,
+        old_vocab_size=occupation.categorical_column.vocabulary_size,
+        # Can't use constant_initializer with load_and_remap.  In practice,
+        # use a truncated normal initializer.
+        backup_initializer=tf.compat.v1.initializers.random_uniform(
+            minval=0.39, maxval=0.39))
+    warm_started_dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[occupation],
+        n_classes=4,
+        optimizer=gradient_descent.SGD(learning_rate=0.0),
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=dnn_classifier.model_dir,
+            var_name_to_vocab_info={
+                OCCUPATION_EMBEDDING_NAME: occupation_vocab_info
+            },
+            # Explicitly providing None here will only warm-start variables
+            # referenced in var_name_to_vocab_info (no hidden weights will be
+            # warm-started).
+            vars_to_warm_start=None))
+
+    warm_started_dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+    # 'doctor' was ID 0 and is still ID 0.
+    self.assertAllClose(
+        dnn_classifier.get_variable_value(OCCUPATION_EMBEDDING_NAME)[0, :],
+        warm_started_dnn_classifier.get_variable_value(
+            OCCUPATION_EMBEDDING_NAME)[0, :])
+    # 'consultant' was ID 2 and is now ID 1.
+    self.assertAllClose(
+        dnn_classifier.get_variable_value(OCCUPATION_EMBEDDING_NAME)[2, :],
+        warm_started_dnn_classifier.get_variable_value(
+            OCCUPATION_EMBEDDING_NAME)[1, :])
+    # 'engineer' is a new entry and should be initialized with the
+    # backup_initializer in VocabInfo.
+    self.assertAllClose([0.39] * 2,
+                        warm_started_dnn_classifier.get_variable_value(
+                            OCCUPATION_EMBEDDING_NAME)[2, :])
+    for variable_name in warm_started_dnn_classifier.get_variable_names():
+      if 'bias' in variable_name:
+        # Hidden layer biases are zero-initialized.
+        bias_values = warm_started_dnn_classifier.get_variable_value(
+            variable_name)
+        self.assertAllClose(np.zeros_like(bias_values), bias_values)
+      elif 'kernel' in variable_name:
+        # We can't override the glorot uniform initializer used for the kernels
+        # in the dense layers, so just make sure we're not getting the same
+        # values from the old checkpoint.
+        self.assertAllNotClose(
+            dnn_classifier.get_variable_value(variable_name),
+            warm_started_dnn_classifier.get_variable_value(variable_name))
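+
+  # A hedged sketch of the vocabulary-remapping setup used above, with
+  # hypothetical file paths and variable names; VocabInfo describes how rows
+  # of the old embedding map onto the new vocabulary, and backup_initializer
+  # covers entries that have no old row:
+  #
+  #   vocab_info = tf.estimator.VocabInfo(
+  #       new_vocab='/tmp/new_vocab.txt', new_vocab_size=3,
+  #       num_oov_buckets=0,
+  #       old_vocab='/tmp/old_vocab.txt', old_vocab_size=3,
+  #       backup_initializer=tf.compat.v1.initializers.truncated_normal())
+  #   ws = tf.estimator.WarmStartSettings(
+  #       ckpt_to_initialize_from='/tmp/old_model',
+  #       var_name_to_vocab_info={'some/embedding/variable': vocab_info},
+  #       vars_to_warm_start=None)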
+
+  def test_warm_starting_with_naming_change(self):
+    """Tests warm-starting with a Tensor name remapping."""
+    locality = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_list(
+            'locality', vocabulary_list=['Mountain View', 'Palo Alto']),
+        dimension=5)
+
+    # Create a DNNClassifier and train to save a checkpoint.
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[locality],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second DNNClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    city = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_list(
+            'city', vocabulary_list=['Mountain View', 'Palo Alto']),
+        dimension=5)
+    warm_started_dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        n_classes=4,
+        optimizer=gradient_descent.SGD(learning_rate=0.0),
+        # The 'city' variable corresponds to the 'locality' variable in the
+        # previous model.
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=dnn_classifier.model_dir,
+            var_name_to_prev_var_name={
+                CITY_EMBEDDING_NAME:
+                    CITY_EMBEDDING_NAME.replace('city', 'locality')
+            }))
+
+    warm_started_dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_dnn_classifier.get_variable_names():
+      if 'city' in variable_name:
+        self.assertAllClose(
+            dnn_classifier.get_variable_value(
+                CITY_EMBEDDING_NAME.replace('city', 'locality')),
+            warm_started_dnn_classifier.get_variable_value(CITY_EMBEDDING_NAME))
+      # The learning rate is also checkpointed in the V2 optimizer, so we need
+      # to make sure the new value is used after warm-starting.
+      elif 'learning_rate' in variable_name:
+        self.assertAllClose(
+            0.0, warm_started_dnn_classifier.get_variable_value(variable_name))
+      else:
+        self.assertAllClose(
+            dnn_classifier.get_variable_value(variable_name),
+            warm_started_dnn_classifier.get_variable_value(variable_name))
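+
+  # A short sketch of the renaming mechanism above, with hypothetical variable
+  # names; var_name_to_prev_var_name maps a variable in the new model to its
+  # differently named counterpart in the old checkpoint:
+  #
+  #   ws = tf.estimator.WarmStartSettings(
+  #       ckpt_to_initialize_from='/tmp/old_model',
+  #       var_name_to_prev_var_name={
+  #           'some/city_embedding/weights': 'some/locality_embedding/weights',
+  #       })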
+
+
+class BaseDNNClassifierEvaluateTest(object):
+
+  def __init__(self, dnn_classifier_fn, fc_impl=feature_column_v2):
+    self._dnn_classifier_fn = dnn_classifier_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_one_dim(self):
+    """Asserts evaluation metrics for one-dimensional input and logits."""
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), global_step, self._model_dir)
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age')],
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      # batch_size = 2, one false label, and one true.
+      return {'age': [[10.], [10.]]}, [[1], [0]]
+
+    # Uses identical numbers as DNNModelTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-2.08], [-2.08]] =>
+    # logistic = 1/(1 + exp(-logits)) = [[0.11105597], [0.11105597]]
+    # loss = (-1. * log(0.111) - 1. * log(0.889)) / 2 = 2.31544200 / 2
+    expected_loss = 1.157721
+    self.assertAllClose(
+        {
+            metric_keys.MetricKeys.LOSS:
+                expected_loss,
+            metric_keys.MetricKeys.LOSS_MEAN:
+                expected_loss,
+            metric_keys.MetricKeys.ACCURACY:
+                0.5,
+            metric_keys.MetricKeys.PRECISION:
+                0.0,
+            metric_keys.MetricKeys.RECALL:
+                0.0,
+            metric_keys.MetricKeys.PREDICTION_MEAN:
+                0.11105597,
+            metric_keys.MetricKeys.LABEL_MEAN:
+                0.5,
+            metric_keys.MetricKeys.ACCURACY_BASELINE:
+                0.5,
+            # There is no good way to calculate AUC for only two data points.
+            # But that is what the algorithm returns.
+            metric_keys.MetricKeys.AUC:
+                0.5,
+            metric_keys.MetricKeys.AUC_PR:
+                0.5,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP:
+                global_step
+        },
+        dnn_classifier.evaluate(input_fn=_input_fn, steps=1))
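+
+  # The expected_loss above can be re-derived with a few lines of NumPy; this
+  # is only a sanity-check sketch of the arithmetic in the comment, not part
+  # of the test:
+  #
+  #   logits = np.array([-2.08, -2.08])          # labels are [1, 0]
+  #   p = 1.0 / (1.0 + np.exp(-logits))          # [0.11105597, 0.11105597]
+  #   loss = (-np.log(p[0]) - np.log(1 - p[1])) / 2.0   # ~1.157721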
+
+  def test_multi_dim(self):
+    """Asserts evaluation metrics for multi-dimensional input and logits."""
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), global_step, self._model_dir)
+    n_classes = 3
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age', shape=[2])],
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      # batch_size = 2, with labels 1 and 0.
+      return {'age': [[10., 8.], [10., 8.]]}, [[1], [0]]
+
+    # Uses identical numbers as
+    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-0.48, 0.48, 0.39], [-0.48, 0.48, 0.39]]
+    # probabilities = exp(logits)/sum(exp(logits))
+    #               = [[0.16670536, 0.43538380, 0.39791084],
+    #                  [0.16670536, 0.43538380, 0.39791084]]
+    # loss = -log(0.43538380) - log(0.16670536)
+    expected_loss = 2.62305466 / 2  # batch size
+    self.assertAllClose(
+        {
+            metric_keys.MetricKeys.LOSS: expected_loss,
+            metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+            metric_keys.MetricKeys.ACCURACY: 0.5,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: global_step
+        }, dnn_classifier.evaluate(input_fn=_input_fn, steps=1))
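+
+  # As above, the multi-class loss can be sanity-checked with NumPy (a sketch
+  # only, not part of the test):
+  #
+  #   logits = np.array([-0.48, 0.48, 0.39])     # labels are [1, 0]
+  #   probs = np.exp(logits) / np.sum(np.exp(logits))
+  #   loss = (-np.log(probs[1]) - np.log(probs[0])) / 2.0  # ~1.31152733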
+
+  def test_float_labels(self):
+    """Asserts evaluation metrics for float labels in binary classification."""
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), global_step, self._model_dir)
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age')],
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      # batch_size = 2, with float labels 0.8 and 0.4.
+      return {'age': [[10.], [10.]]}, [[0.8], [0.4]]
+
+    # Uses identical numbers as DNNModelTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-2.08], [-2.08]] =>
+    # logistic = 1/(1 + exp(-logits)) = [[0.11105597], [0.11105597]]
+    # loss = (-0.8 * log(0.111) -0.2 * log(0.889)
+    #        -0.4 * log(0.111) -0.6 * log(0.889)) / 2 = 2.7314420 / 2
+    expected_loss = 1.365721
+    metrics = dnn_classifier.evaluate(input_fn=_input_fn, steps=1)
+    self.assertAlmostEqual(expected_loss, metrics[metric_keys.MetricKeys.LOSS])
+
+  def test_multi_dim_weights(self):
+    """Tests evaluation with weights."""
+    # Uses the same checkpoint as test_multi_dim.
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), global_step, self._model_dir)
+    n_classes = 3
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age', shape=[2])],
+        n_classes=n_classes,
+        weight_column='w',
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      # batch_size = 2, with labels 1 and 0 and example weights 10 and 100.
+      return {'age': [[10., 8.], [10., 8.]], 'w': [[10.], [100.]]}, [[1], [0]]
+
+    # Uses identical numbers as test_multi_dim.
+    # See that test for calculation of logits.
+    # loss = (-log(0.43538380)*10 - log(0.16670536)*100) / 2
+    expected_loss = 93.734
+    metrics = dnn_classifier.evaluate(input_fn=_input_fn, steps=1)
+    self.assertAlmostEqual(
+        expected_loss, metrics[metric_keys.MetricKeys.LOSS], places=3)
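+
+  # A sketch of the weighted-loss arithmetic from the comment above, for
+  # readers verifying the numbers (not part of the test): each example's loss
+  # is scaled by its weight before the mean over the batch is taken.
+  #
+  #   probs = np.array([0.16670536, 0.43538380, 0.39791084])
+  #   loss = (-np.log(probs[1]) * 10. - np.log(probs[0]) * 100.) / 2.
+  #   # ~93.734 (the expected_loss above)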
+
+
+class BaseDNNRegressorEvaluateTest(object):
+
+  def __init__(self, dnn_regressor_fn, fc_impl=feature_column_v2):
+    self._dnn_regressor_fn = dnn_regressor_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_one_dim(self):
+    """Asserts evaluation metrics for one-dimensional input and logits."""
+    # Create checkpoint: num_inputs=1, hidden_units=(2, 2), num_outputs=1.
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), global_step, self._model_dir)
+
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age')],
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      return {'age': [[10.]]}, [[1.]]
+
+    # Uses identical numbers as DNNModelTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-2.08]] => predictions = [-2.08].
+    # loss = (1+2.08)^2 = 9.4864
+    expected_loss = 9.4864
+    self.assertAllClose(
+        {
+            metric_keys.MetricKeys.LOSS: expected_loss,
+            metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+            metric_keys.MetricKeys.PREDICTION_MEAN: -2.08,
+            metric_keys.MetricKeys.LABEL_MEAN: 1.0,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: global_step
+        }, dnn_regressor.evaluate(input_fn=_input_fn, steps=1))
+
+  def test_multi_dim(self):
+    """Asserts evaluation metrics for multi-dimensional input and logits."""
+    # Create checkpoint: num_inputs=2, hidden_units=(2, 2), num_outputs=3.
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), global_step, self._model_dir)
+    label_dimension = 3
+
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age', shape=[2])],
+        label_dimension=label_dimension,
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      return {'age': [[10., 8.]]}, [[1., -1., 0.5]]
+
+    # Uses identical numbers as
+    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-0.48, 0.48, 0.39]]
+    # loss = (1+0.48)^2 + (-1-0.48)^2 + (0.5-0.39)^2 = 4.3929
+    # expected_loss = loss / 3
+    expected_loss = 1.4643
+    self.assertAllClose(
+        {
+            metric_keys.MetricKeys.LOSS: expected_loss,
+            metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+            metric_keys.MetricKeys.PREDICTION_MEAN: 0.39 / 3.0,
+            metric_keys.MetricKeys.LABEL_MEAN: 0.5 / 3.0,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: global_step
+        }, dnn_regressor.evaluate(input_fn=_input_fn, steps=1))
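+
+  # The regression loss above is a plain sum of squared errors divided by the
+  # label dimension; a NumPy sketch of the same arithmetic (not part of the
+  # test):
+  #
+  #   predictions = np.array([-0.48, 0.48, 0.39])
+  #   labels = np.array([1., -1., 0.5])
+  #   loss = np.sum((labels - predictions) ** 2) / 3.  # ~1.4643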
+
+  def test_multi_dim_weights(self):
+    """Asserts evaluation metrics for multi-dimensional input and logits."""
+    # same checkpoint with test_multi_dim.
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), global_step, self._model_dir)
+    label_dimension = 3
+
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age', shape=[2])],
+        label_dimension=label_dimension,
+        weight_column='w',
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      return {'age': [[10., 8.]], 'w': [10.]}, [[1., -1., 0.5]]
+
+    # Uses identical numbers as test_multi_dim.
+    # See that test for calculation of logits.
+    # loss = 4.3929*10/3
+    expected_loss = 14.643
+    metrics = dnn_regressor.evaluate(input_fn=_input_fn, steps=1)
+    self.assertAlmostEqual(
+        expected_loss, metrics[metric_keys.MetricKeys.LOSS], places=3)
+
+
+class BaseDNNClassifierPredictTest(object):
+
+  def __init__(self, dnn_classifier_fn, fc_impl=feature_column_v2):
+    self._dnn_classifier_fn = dnn_classifier_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def _test_one_dim(self, label_vocabulary, label_output_fn):
+    """Asserts predictions for one-dimensional input and logits."""
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ),
+                      global_step=0,
+                      model_dir=self._model_dir)
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        label_vocabulary=label_vocabulary,
+        feature_columns=(self._fc_impl.numeric_column('x'),),
+        model_dir=self._model_dir)
+    input_fn = numpy_io.numpy_input_fn(
+        x={'x': np.array([[10.]])}, batch_size=1, shuffle=False)
+    # Uses identical numbers as DNNModelTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-2.08] =>
+    # logistic = exp(-2.08)/(1 + exp(-2.08)) = 0.11105597
+    # probabilities = [1-logistic, logistic] = [0.88894403, 0.11105597]
+    # class_ids = argmax(probabilities) = [0]
+    predictions = next(dnn_classifier.predict(input_fn=input_fn))
+    self.assertAllClose([-2.08],
+                        predictions[prediction_keys.PredictionKeys.LOGITS])
+    self.assertAllClose([0.11105597],
+                        predictions[prediction_keys.PredictionKeys.LOGISTIC])
+    self.assertAllClose(
+        [0.88894403, 0.11105597],
+        predictions[prediction_keys.PredictionKeys.PROBABILITIES])
+    self.assertAllClose([0],
+                        predictions[prediction_keys.PredictionKeys.CLASS_IDS])
+    self.assertAllEqual([label_output_fn(0)],
+                        predictions[prediction_keys.PredictionKeys.CLASSES])
+    self.assertAllClose(
+        [0, 1], predictions[prediction_keys.PredictionKeys.ALL_CLASS_IDS])
+    self.assertAllEqual(
+        [label_output_fn(0), label_output_fn(1)],
+        predictions[prediction_keys.PredictionKeys.ALL_CLASSES])
+
+  def test_one_dim_without_label_vocabulary(self):
+    self._test_one_dim(
+        label_vocabulary=None, label_output_fn=lambda x: ('%s' % x).encode())
+
+  def test_one_dim_with_label_vocabulary(self):
+    n_classes = 2
+    self._test_one_dim(
+        label_vocabulary=['class_vocab_{}'.format(i) for i in range(n_classes)],
+        label_output_fn=lambda x: ('class_vocab_%s' % x).encode())
+
+  def _test_multi_dim_with_3_classes(self, label_vocabulary, label_output_fn):
+    """Asserts predictions for multi-dimensional input and logits."""
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ),
+                      global_step=0,
+                      model_dir=self._model_dir)
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        feature_columns=(self._fc_impl.numeric_column('x', shape=(2,)),),
+        label_vocabulary=label_vocabulary,
+        n_classes=3,
+        model_dir=self._model_dir)
+    input_fn = numpy_io.numpy_input_fn(
+        # Inputs shape is (batch_size, num_inputs).
+        x={'x': np.array([[10., 8.]])},
+        batch_size=1,
+        shuffle=False)
+    # Uses identical numbers as
+    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-0.48, 0.48, 0.39] =>
+    # probabilities[i] = exp(logits[i]) / sum_j exp(logits[j]) =>
+    # probabilities = [0.16670536, 0.43538380, 0.39791084]
+    # class_ids = argmax(probabilities) = [1]
+    predictions = next(dnn_classifier.predict(input_fn=input_fn))
+    self.assertItemsEqual([
+        prediction_keys.PredictionKeys.LOGITS,
+        prediction_keys.PredictionKeys.PROBABILITIES,
+        prediction_keys.PredictionKeys.CLASS_IDS,
+        prediction_keys.PredictionKeys.CLASSES,
+        prediction_keys.PredictionKeys.ALL_CLASS_IDS,
+        prediction_keys.PredictionKeys.ALL_CLASSES
+    ], six.iterkeys(predictions))
+    self.assertAllClose([-0.48, 0.48, 0.39],
+                        predictions[prediction_keys.PredictionKeys.LOGITS])
+    self.assertAllClose(
+        [0.16670536, 0.43538380, 0.39791084],
+        predictions[prediction_keys.PredictionKeys.PROBABILITIES])
+    self.assertAllEqual([1],
+                        predictions[prediction_keys.PredictionKeys.CLASS_IDS])
+    self.assertAllEqual([label_output_fn(1)],
+                        predictions[prediction_keys.PredictionKeys.CLASSES])
+    self.assertAllEqual(
+        [0, 1, 2], predictions[prediction_keys.PredictionKeys.ALL_CLASS_IDS])
+    self.assertAllEqual(
+        [label_output_fn(0),
+         label_output_fn(1),
+         label_output_fn(2)],
+        predictions[prediction_keys.PredictionKeys.ALL_CLASSES])
+
+  def test_multi_dim_with_3_classes_but_no_label_vocab(self):
+    self._test_multi_dim_with_3_classes(
+        label_vocabulary=None, label_output_fn=lambda x: ('%s' % x).encode())
+
+  def test_multi_dim_with_3_classes_and_label_vocab(self):
+    n_classes = 3
+    self._test_multi_dim_with_3_classes(
+        label_vocabulary=['class_vocab_{}'.format(i) for i in range(n_classes)],
+        label_output_fn=lambda x: ('class_vocab_%s' % x).encode())
+
+
+class BaseDNNRegressorPredictTest(object):
+
+  def __init__(self, dnn_regressor_fn, fc_impl=feature_column_v2):
+    self._dnn_regressor_fn = dnn_regressor_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_one_dim(self):
+    """Asserts predictions for one-dimensional input and logits."""
+    # Create checkpoint: num_inputs=1, hidden_units=(2, 2), num_outputs=1.
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ),
+                      global_step=0,
+                      model_dir=self._model_dir)
+
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=(2, 2),
+        feature_columns=(self._fc_impl.numeric_column('x'),),
+        model_dir=self._model_dir)
+    input_fn = numpy_io.numpy_input_fn(
+        x={'x': np.array([[10.]])}, batch_size=1, shuffle=False)
+    # Uses identical numbers as DNNModelTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-2.08]] => predictions = [-2.08].
+    self.assertAllClose({
+        prediction_keys.PredictionKeys.PREDICTIONS: [-2.08],
+    }, next(dnn_regressor.predict(input_fn=input_fn)))
+
+  def test_multi_dim(self):
+    """Asserts predictions for multi-dimensional input and logits."""
+    # Create checkpoint: num_inputs=2, hidden_units=(2, 2), num_outputs=3.
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), 100, self._model_dir)
+
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=(2, 2),
+        feature_columns=(self._fc_impl.numeric_column('x', shape=(2,)),),
+        label_dimension=3,
+        model_dir=self._model_dir)
+    input_fn = numpy_io.numpy_input_fn(
+        # Inputs shape is (batch_size, num_inputs).
+        x={'x': np.array([[10., 8.]])},
+        batch_size=1,
+        shuffle=False)
+    # Uses identical numbers as
+    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-0.48, 0.48, 0.39]] => predictions = [-0.48, 0.48, 0.39]
+    self.assertAllClose(
+        {
+            prediction_keys.PredictionKeys.PREDICTIONS: [-0.48, 0.48, 0.39],
+        }, next(dnn_regressor.predict(input_fn=input_fn)))
+
+
+class _SummaryHook(tf.compat.v1.train.SessionRunHook):
+  """Saves summaries every N steps."""
+
+  def __init__(self):
+    self._summaries = []
+
+  def begin(self):
+    self._summary_op = tf.compat.v1.summary.merge_all()
+
+  def before_run(self, run_context):
+    return tf.compat.v1.train.SessionRunArgs({'summary': self._summary_op})
+
+  def after_run(self, run_context, run_values):
+    s = summary_pb2.Summary()
+    s.ParseFromString(run_values.results['summary'])
+    self._summaries.append(s)
+
+  def summaries(self):
+    return tuple(self._summaries)
+
+
+def _assert_checkpoint(testcase, global_step, input_units, hidden_units,
+                       output_units, model_dir):
+  """Asserts checkpoint contains expected variables with proper shapes.
+
+  Args:
+    testcase: A TestCase instance.
+    global_step: Expected global step value.
+    input_units: The dimension of the input layer.
+    hidden_units: Iterable of integer sizes for the hidden layers.
+    output_units: The dimension of the output layer (logits).
+    model_dir: The model directory.
+  """
+  shapes = {name: shape for (name, shape) in tf.train.list_variables(model_dir)}
+
+  # Global step.
+  testcase.assertEqual([], shapes[tf.compat.v1.GraphKeys.GLOBAL_STEP])
+  testcase.assertEqual(
+      global_step,
+      tf.train.load_variable(model_dir, tf.compat.v1.GraphKeys.GLOBAL_STEP))
+
+  # Hidden layer weights.
+  prev_layer_units = input_units
+  for i in range(len(hidden_units)):
+    layer_units = hidden_units[i]
+    testcase.assertAllEqual((prev_layer_units, layer_units),
+                            shapes[HIDDEN_WEIGHTS_NAME_PATTERN % i])
+    testcase.assertAllEqual((layer_units,),
+                            shapes[HIDDEN_BIASES_NAME_PATTERN % i])
+    prev_layer_units = layer_units
+
+  # Output layer weights.
+  testcase.assertAllEqual((prev_layer_units, output_units),
+                          shapes[LOGITS_WEIGHTS_NAME])
+  testcase.assertAllEqual((output_units,), shapes[LOGITS_BIASES_NAME])
+
+
+def _assert_simple_summary(testcase, expected_values, actual_summary):
+  """Assert summary the specified simple values.
+
+  Args:
+    testcase: A TestCase instance.
+    expected_values: Dict of expected tags and simple values.
+    actual_summary: `summary_pb2.Summary`.
+  """
+  testcase.assertAllClose(
+      expected_values, {
+          v.tag: v.simple_value
+          for v in actual_summary.value
+          if (v.tag in expected_values)
+      })
+
+
+class BaseDNNClassifierTrainTest(object):
+
+  def __init__(self, dnn_classifier_fn, fc_impl=feature_column_v2):
+    self._dnn_classifier_fn = dnn_classifier_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_from_scratch_with_default_optimizer_binary(self):
+    hidden_units = (2, 2)
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate final checkpoint.
+    num_steps = 5
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[1]]), steps=num_steps)
+    _assert_checkpoint(
+        self,
+        num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+
+  def test_from_scratch_with_default_optimizer_multi_class(self):
+    hidden_units = (2, 2)
+    n_classes = 3
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate final checkpoint.
+    num_steps = 5
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[2]]), steps=num_steps)
+    _assert_checkpoint(
+        self,
+        num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=n_classes,
+        model_dir=self._model_dir)
+
+  def test_from_scratch_validate_summary(self):
+    hidden_units = (2, 2)
+    opt = mock_optimizer(self, hidden_units=hidden_units)
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[1]]),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(num_steps,
+                     dnn_classifier.get_variable_value(opt.iterations.name))
+    _assert_checkpoint(
+        self,
+        num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      summary_keys = [v.tag for v in summary.value]
+      self.assertIn(metric_keys.MetricKeys.LOSS, summary_keys)
+
+  def test_binary_classification(self):
+    base_global_step = 100
+    hidden_units = (2, 2)
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    # Uses identical numbers as DNNModelFnTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-2.08] => probabilities = [0.889, 0.111]
+    # loss = -1. * log(0.111) = 2.19772100
+    expected_loss = 2.19772100
+    opt = mock_optimizer(
+        self, hidden_units=hidden_units, expected_loss=expected_loss)
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[1]]),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(base_global_step + num_steps,
+                     dnn_classifier.get_variable_value(opt.iterations.name))
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      _assert_simple_summary(
+          self, {
+              'dnn/hiddenlayer_0/fraction_of_zero_values': 0.,
+              'dnn/hiddenlayer_1/fraction_of_zero_values': .5,
+              'dnn/logits/fraction_of_zero_values': 0.,
+              metric_keys.MetricKeys.LOSS: expected_loss,
+          }, summary)
+    _assert_checkpoint(
+        self,
+        base_global_step + num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+
+  def test_binary_classification_float_labels(self):
+    base_global_step = 100
+    hidden_units = (2, 2)
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    # Uses identical numbers as DNNModelFnTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-2.08] => probabilities = [0.889, 0.111]
+    # loss = -0.8 * log(0.111) -0.2 * log(0.889) = 1.7817210
+    expected_loss = 1.7817210
+    opt = mock_optimizer(
+        self, hidden_units=hidden_units, expected_loss=expected_loss)
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[0.8]]), steps=num_steps)
+    self.assertEqual(base_global_step + num_steps,
+                     dnn_classifier.get_variable_value(opt.iterations.name))
+
+  def test_multi_class(self):
+    n_classes = 3
+    base_global_step = 100
+    hidden_units = (2, 2)
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    # Uses identical numbers as DNNModelFnTest.test_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-2.08, 2.08, 1.19] => probabilities = [0.0109, 0.7011, 0.2879]
+    # loss = -1. * log(0.7011) = 0.35505795
+    expected_loss = 0.35505795
+    opt = mock_optimizer(
+        self, hidden_units=hidden_units, expected_loss=expected_loss)
+    dnn_classifier = self._dnn_classifier_fn(
+        n_classes=n_classes,
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[1]]),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(base_global_step + num_steps,
+                     dnn_classifier.get_variable_value(opt.iterations.name))
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      _assert_simple_summary(
+          self, {
+              'dnn/hiddenlayer_0/fraction_of_zero_values': 0.,
+              'dnn/hiddenlayer_1/fraction_of_zero_values': .5,
+              'dnn/logits/fraction_of_zero_values': 0.,
+              metric_keys.MetricKeys.LOSS: expected_loss,
+          }, summary)
+    _assert_checkpoint(
+        self,
+        base_global_step + num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=n_classes,
+        model_dir=self._model_dir)
+
+
+class BaseDNNRegressorTrainTest(object):
+
+  def __init__(self, dnn_regressor_fn, fc_impl=feature_column_v2):
+    self._dnn_regressor_fn = dnn_regressor_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_from_scratch_with_default_optimizer(self):
+    hidden_units = (2, 2)
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate final checkpoint.
+    num_steps = 5
+    dnn_regressor.train(
+        input_fn=lambda: ({
+            'age': ((1,),)
+        }, ((10,),)), steps=num_steps)
+    _assert_checkpoint(
+        self,
+        num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+
+  def test_from_scratch(self):
+    hidden_units = (2, 2)
+    opt = mock_optimizer(self, hidden_units=hidden_units)
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_regressor.train(
+        input_fn=lambda: ({
+            'age': ((1,),)
+        }, ((5.,),)),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(num_steps,
+                     dnn_regressor.get_variable_value(opt.iterations.name))
+    _assert_checkpoint(
+        self,
+        num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      summary_keys = [v.tag for v in summary.value]
+      self.assertIn(metric_keys.MetricKeys.LOSS, summary_keys)
+
+  def test_one_dim(self):
+    """Asserts train loss for one-dimensional input and logits."""
+    base_global_step = 100
+    hidden_units = (2, 2)
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    # Uses identical numbers as DNNModelFnTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-2.08] => predictions = [-2.08]
+    # loss = (1 + 2.08)^2 = 9.4864
+    expected_loss = 9.4864
+    opt = mock_optimizer(
+        self, hidden_units=hidden_units, expected_loss=expected_loss)
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_regressor.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[1.]]),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(base_global_step + num_steps,
+                     dnn_regressor.get_variable_value(opt.iterations.name))
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      _assert_simple_summary(
+          self, {
+              'dnn/hiddenlayer_0/fraction_of_zero_values': 0.,
+              'dnn/hiddenlayer_1/fraction_of_zero_values': 0.5,
+              'dnn/logits/fraction_of_zero_values': 0.,
+              metric_keys.MetricKeys.LOSS: expected_loss,
+          }, summary)
+    _assert_checkpoint(
+        self,
+        base_global_step + num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+
+  def test_multi_dim(self):
+    """Asserts train loss for multi-dimensional input and logits."""
+    base_global_step = 100
+    hidden_units = (2, 2)
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    input_dimension = 2
+    label_dimension = 3
+
+    # Uses identical numbers as
+    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-0.48, 0.48, 0.39]]
+    # loss = (1+0.48)^2 + (-1-0.48)^2 + (0.5-0.39)^2 = 4.3929
+    # expected_loss = loss / 3 (batch size)
+    expected_loss = 1.4643
+    opt = mock_optimizer(
+        self, hidden_units=hidden_units, expected_loss=expected_loss)
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=hidden_units,
+        feature_columns=[
+            self._fc_impl.numeric_column('age', shape=[input_dimension])
+        ],
+        label_dimension=label_dimension,
+        optimizer=opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_regressor.train(
+        input_fn=lambda: ({
+            'age': [[10., 8.]]
+        }, [[1., -1., 0.5]]),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(base_global_step + num_steps,
+                     dnn_regressor.get_variable_value(opt.iterations.name))
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      _assert_simple_summary(
+          self, {
+              'dnn/hiddenlayer_0/fraction_of_zero_values': 0.,
+              'dnn/hiddenlayer_1/fraction_of_zero_values': 0.5,
+              'dnn/logits/fraction_of_zero_values': 0.,
+              metric_keys.MetricKeys.LOSS: expected_loss,
+          }, summary)
+    _assert_checkpoint(
+        self,
+        base_global_step + num_steps,
+        input_units=input_dimension,
+        hidden_units=hidden_units,
+        output_units=label_dimension,
+        model_dir=self._model_dir)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/head.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/head.py
new file mode 100644
index 00000000..3af6df8e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/head.py
@@ -0,0 +1,1715 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Abstractions for the head(s) of a model."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import abc
+import collections
+
+import six
+import tensorflow as tf
+from tensorflow.python.feature_column import feature_column
+from tensorflow.python.feature_column import feature_column_lib
+from tensorflow.python.framework import ops
+from tensorflow.python.ops import lookup_ops
+from tensorflow.python.ops import string_ops
+from tensorflow.python.ops import weights_broadcast_ops
+from tensorflow.python.util import function_utils
+from tensorflow_estimator.python.estimator import model_fn
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.canned import prediction_keys
+from tensorflow_estimator.python.estimator.export import export_output
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+_DEFAULT_SERVING_KEY = tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY
+
+# The above default is defined by TF Serving, but these next three are just
+# a local convention without any special meaning.
+_CLASSIFY_SERVING_KEY = 'classification'
+_REGRESS_SERVING_KEY = 'regression'
+_PREDICT_SERVING_KEY = 'predict'
+
+# A LossSpec contains
+# * a scalar `Tensor` representing reduced weighted training loss
+# * a `Tensor` representing the unreduced unweighted loss
+# * a `Tensor` representing the example weights
+# * possibly processed labels (e.g. vocabulary lookup, shape manipulation, etc.)
+LossSpec = collections.namedtuple(
+    'LossSpec',
+    ['training_loss', 'unreduced_loss', 'weights', 'processed_labels'])
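+
+# For illustration, a head could package its loss like this (the values are
+# hypothetical; this is not code from this module):
+#
+#   spec = LossSpec(
+#       training_loss=tf.constant(1.5),
+#       unreduced_loss=tf.constant([1., 2.]),
+#       weights=tf.constant([1., 1.]),
+#       processed_labels=tf.constant([0, 1]))
+#   spec.training_loss  # scalar Tensor used to build the train_op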
+
+
+def _summary_key(head_name, val):
+  return '%s/%s' % (val, head_name) if head_name else val
+
+
+def _create_eval_metrics_tuple(fn, kwargs):
+  """Creates TPU eval metrics tuple.
+
+  Helper function to make the eval_metric tuple (eval_metric_fn, fn_kwargs)
+  used by `TPUEstimator`. TPUEstimator requires that `eval_metric_fn` take
+  exclusively Tensor arguments. This helper creates such a function from a
+  more generic function that can take both Tensor and non-Tensor arguments.
+
+  Args:
+    fn: An eval_metric_fn that takes both Tensor and non-Tensor arguments. This
+      function must return a dict of the form
+        {'metric name': (metric_tensor, eval_op)}
+    kwargs: Dict of arguments for `fn`.
+
+  Returns:
+    `eval_metric` tuple that can be passed to a `model_fn._TPUEstimatorSpec`.
+  """
+  tensor_kwargs = {}
+  nontensor_kwargs = {}
+  for k, v in six.iteritems(kwargs):
+    if tf.is_tensor(v):
+      tensor_kwargs[k] = v
+    else:
+      nontensor_kwargs[k] = v
+
+  def _fn(**tensors):
+    return fn(**dict(nontensor_kwargs, **tensors))
+
+  return (_fn, tensor_kwargs)
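+
+# A hedged usage sketch for _create_eval_metrics_tuple; the metric function
+# and its argument names below are illustrative, not from this module. The
+# non-Tensor 'prefix' argument is closed over, so TPUEstimator only ever sees
+# Tensor arguments:
+#
+#   def _metric_fn(labels, predictions, prefix):
+#     name = prefix + '/accuracy'
+#     return {name: tf.compat.v1.metrics.accuracy(labels, predictions)}
+#
+#   eval_metrics = _create_eval_metrics_tuple(
+#       _metric_fn,
+#       {'labels': labels, 'predictions': predictions, 'prefix': 'eval'})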
+
+
+class _Head(object):
+  """Interface for the head/top of a model.
+
+  Given logits (or the output of a hidden layer), a Head knows how to compute
+  predictions, loss, train_op, metrics and export outputs. It is meant to:
+
+  1. Simplify writing model_fn and make model_fn more configurable.
+  2. Support a wide range of machine learning models. Since most heads can
+     work with logits, they can support DNN, RNN, Wide, Wide&Deep,
+     Global objectives, Gradient boosted trees and many other types
+     of machine learning models.
+
+  Common usage:
+  Here is a simplified model_fn to build a DNN regression model.
+    ```python
+    def _my_dnn_model_fn(features, labels, mode, params, config=None):
+      # Optionally your callers can pass head to model_fn as a param.
+      head = tf.contrib.estimator.regression_head(...)
+      inputs = tf.feature_column.input_layer(features, ...)
+      hidden_layer0 = tf.layers.dense(
+          inputs, units=1000, activation=tf.nn.relu)
+      hidden_layer1 = tf.layers.dense(
+          hidden_layer0, units=500, activation=tf.nn.relu)
+      logits = tf.layers.dense(
+          hidden_layer1, units=head.logits_dimension, activation=None)
+
+      return head.create_estimator_spec(
+          features=features,
+          labels=labels,
+          mode=mode,
+          logits=logits,
+          optimizer=optimizer)
+    ```
+
+  There are cases where computing and applying gradients cannot be meaningfully
+  captured by the optimizer or train_op_fn we support (for example, with a sync
+  optimizer). In such cases, you can take on that responsibility yourself. Here
+  is a common use case:
+    ```python
+    estimator_spec = head.create_estimator_spec(
+        features=features,
+        labels=labels,
+        mode=mode,
+        logits=logits,
+        train_op_fn=lambda _: tf.no_op())
+    if mode == ModeKeys.TRAIN:
+      optimizer = ...
+      sync = tf.train.SyncReplicasOptimizer(opt=optimizer, ...)
+      update_op = sync.minimize(
+          estimator_spec.loss, global_step=tf.get_global_step())
+      hooks = [sync.make_session_run_hook(is_chief)]
+      ... update train_op and hooks in EstimatorSpec and return
+    ```
+  """
+  __metaclass__ = abc.ABCMeta
+
+  @abc.abstractproperty
+  def name(self):
+    """The name of this head.
+
+    Returns:
+      A string.
+    """
+    raise NotImplementedError('Calling an abstract method.')
+
+  @abc.abstractproperty
+  def logits_dimension(self):
+    """Size of the last dimension of the logits `Tensor`.
+
+    Typically, logits is of shape `[batch_size, logits_dimension]`.
+
+    Returns:
+      The expected size of the `logits` tensor.
+    """
+    raise NotImplementedError('Calling an abstract method.')
+
+  @abc.abstractmethod
+  def create_loss(self, features, mode, logits, labels):
+    """Returns a loss Tensor from provided logits.
+
+    This function is designed to be used by framework developers.  Almost all
+    users should use create_estimator_spec(), which calls this internally.
+    `mode` and `features` are most likely not used, but some Head
+    implementations may require them.
+
+    Args:
+      features: Input `dict` of `Tensor` objects.
+      mode: Estimator's `ModeKeys`.
+      logits: logits `Tensor` to be used for loss construction.
+      labels: Labels `Tensor`, or `dict` of same.
+
+    Returns:
+      A LossSpec that contains
+      * the scalar `Tensor` representing reduced weighted training loss
+      * the `Tensor` representing the unreduced unweighted loss
+      * the `Tensor` representing the example weights
+      * possibly processed labels (e.g. vocabulary lookup, shape manipulation,
+        etc.)
+
+      The `LossSpec` may be extended in the future.
+    """
+    raise NotImplementedError('Calling an abstract method.')
+
+  # TODO(b/65403806): By default, collect regularization_losses from
+  # GraphKeys.REGULARIZATION_LOSSES collection.
+  def create_estimator_spec(self,
+                            features,
+                            mode,
+                            logits,
+                            labels=None,
+                            optimizer=None,
+                            train_op_fn=None,
+                            regularization_losses=None):
+    """Returns `EstimatorSpec` that a model_fn can return.
+
+    Please note that,
+    + All args must be passed via name.
+
+    Args:
+      features: Input `dict` of `Tensor` or `SparseTensor` objects.
+      mode: Estimator's `ModeKeys`.
+      logits: logits `Tensor` to be used by the head.
+      labels: Labels `Tensor`, or `dict` of same.
+      optimizer: `Optimizer` instance to optimize the loss in TRAIN mode.
+        Namely, sets `train_op = optimizer.minimize(loss, global_step)`, which
+        updates variables and increments `global_step`.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns an op
+        to optimize the model with the loss in TRAIN mode. Used if `optimizer`
+        is `None`. Exactly one of `train_op_fn` and `optimizer` must be set in
+        TRAIN mode. None is allowed in other modes. If you want to optimize the
+        loss yourself, you can pass `lambda _: tf.no_op()` and then use
+        `EstimatorSpec.loss` to compute and apply gradients.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses.
+
+    Returns:
+      `EstimatorSpec`.
+    """
+    try:
+      tpu_estimator_spec = (
+          self._create_tpu_estimator_spec(features, mode, logits, labels,
+                                          optimizer, train_op_fn,
+                                          regularization_losses))
+      return tpu_estimator_spec.as_estimator_spec()
+    except NotImplementedError:
+      # Not all subclasses of _Head implement _create_tpu_estimator_spec. When
+      # it is implemented, the try block above uses it to build the
+      # `EstimatorSpec`; otherwise we surface the error below.
+      raise NotImplementedError(
+          'Subclasses of _Head must implement `create_estimator_spec()` or '
+          '_create_tpu_estimator_spec().')
+
+  def _create_tpu_estimator_spec(self,
+                                 features,
+                                 mode,
+                                 logits,
+                                 labels=None,
+                                 optimizer=None,
+                                 train_op_fn=None,
+                                 regularization_losses=None):
+    """Returns `model_fn._TPUEstimatorSpec` that a model_fn can return.
+
+    Args:
+      features: Input `dict` of `Tensor` or `SparseTensor` objects.
+      mode: Estimator's `ModeKeys`.
+      logits: logits `Tensor` to be used by the head.
+      labels: Labels `Tensor`, or `dict` of same.
+      optimizer: `Optimizer` instance to optimize the loss in TRAIN mode.
+        Namely, sets `train_op = optimizer.minimize(loss, global_step)`, which
+        updates variables and increments `global_step`.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns an op
+        to optimize the model with the loss in TRAIN mode. Used if `optimizer`
+        is `None`. Exactly one of `train_op_fn` and `optimizer` must be set in
+        TRAIN mode. `None` is allowed in other modes. If you want to optimize
+        the loss yourself, you can pass `lambda _: tf.no_op()` and then use
+        `EstimatorSpec.loss` to compute and apply gradients.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses.
+
+    Returns:
+      A `model_fn._TPUEstimatorSpec` instance.
+    """
+    raise NotImplementedError(
+        'TPUEstimatorSpec not available for this model head.')
+
+
+def _check_dense_labels_match_logits_and_reshape(labels, logits,
+                                                 expected_labels_dimension):
+  """Checks that labels shape matches logits and reshapes if needed.
+
+  Consider logits of shape [D0, D1, ... DN, logits_dimension]. Then labels
+  shape must be [D0, D1, ... DN, expected_labels_dimension].
+  If expected_labels_dimension=1, labels could be [D0, D1, ... DN] and this
+  method reshapes them to [D0, D1, ... DN, 1].
+
+  Args:
+    labels: labels Tensor.
+    logits: logits Tensor.
+    expected_labels_dimension: Integer.
+
+  Returns:
+    Validated and reshaped labels Tensor.
+  Raises:
+    ValueError: If labels is a SparseTensor.
+    ValueError: If labels shape is statically defined and fails validation.
+    OpError: If labels shape is not statically defined and fails validation.
+  """
+  if labels is None:
+    raise ValueError(
+        'You must provide a labels Tensor. Given: None. '
+        'Suggested troubleshooting steps: Check that your data contains '
+        'your label feature. Check that your input_fn properly parses and '
+        'returns labels.')
+  with ops.name_scope(None, 'labels', (labels, logits)) as scope:
+    labels = tf.compat.v1.convert_to_tensor_or_sparse_tensor(labels)
+    if isinstance(labels, tf.sparse.SparseTensor):
+      raise ValueError(
+          'SparseTensor labels are not supported. '
+          'labels must be a Tensor of shape [D0, D1, ..., DN, %s], '
+          'e.g. [batch_size, %s]. '
+          'Suggested Fix (1): Check the label feature in your data. '
+          'Each example must contain %s value(s). If not, your choice of label '
+          'was probably incorrect. '
+          'Suggested Fix (2): In your input_fn, use '
+          'tf.sparse_tensor_to_dense() to turn labels into a Tensor.'
+          '' % (expected_labels_dimension, expected_labels_dimension,
+                expected_labels_dimension))
+    if (labels.shape.ndims is not None and logits.shape.ndims is not None and
+        labels.shape.ndims == logits.shape.ndims - 1):
+      labels = tf.compat.v1.expand_dims(labels, -1)
+    labels_shape = tf.compat.v1.shape(labels)
+    logits_shape = tf.compat.v1.shape(logits)
+    err_msg = (
+        'labels shape must be [D0, D1, ... DN, {}]. '
+        'Suggested Fix: check your n_classes argument to the estimator '
+        'and/or the shape of your label.'.format(expected_labels_dimension))
+    assert_rank = tf.compat.v1.debugging.assert_rank_at_least(
+        labels, 2, message=err_msg)
+    with tf.control_dependencies([assert_rank]):
+      static_shape = labels.shape
+      if static_shape.ndims is not None:
+        dim1 = static_shape[-1]
+        if (dim1 is not None) and (dim1 != expected_labels_dimension):
+          raise ValueError('Mismatched label shape. '
+                           'Expected labels dimension=%s. Received %s. '
+                           'Suggested Fix: '
+                           'If your classifier expects one-hot encoded '
+                           'labels, check your n_classes argument to the '
+                           'estimator and/or the shape of your label. '
+                           'Otherwise, check the shape of your label.' %
+                           (expected_labels_dimension, dim1))
+      expected_labels_shape = tf.concat(
+          [logits_shape[:-1], [expected_labels_dimension]], axis=0)
+      assert_dimension = tf.compat.v1.debugging.assert_equal(
+          expected_labels_shape,
+          labels_shape,
+          message=err_msg,
+          data=[
+              'expected_labels_shape: ', expected_labels_shape,
+              'labels_shape: ', labels_shape
+          ])
+      with tf.control_dependencies([assert_dimension]):
+        return tf.identity(labels, name=scope)
+
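+# Example (editor's sketch): with logits of shape [2, 3] and
+# expected_labels_dimension=1, labels of shape [2] are reshaped to [2, 1];
+# labels already of shape [2, 1] pass through unchanged:
+#
+#   logits = tf.constant([[1., 2., 3.], [4., 5., 6.]])
+#   labels = tf.constant([0, 2])  # shape [2]
+#   labels = _check_dense_labels_match_logits_and_reshape(
+#       labels=labels, logits=logits, expected_labels_dimension=1)
+#   # labels now has shape [2, 1].
+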
+
+def _get_weights_and_check_match_logits(features,
+                                        weight_column,
+                                        logits,
+                                        allow_per_logit_weights=False):
+  """Fetches weights from features and checks that the shape matches logits.
+
+  Consider logits of shape [D0, D1, ... DN, logits_dimension]. Weights shape
+  can be either:
+  * [D0, D1, ... DN, logits_dimension] if `allow_per_logit_weights=True`.
+  * [D0, D1, ... DN, 1]
+  * [D0, D1, ... DN]: In this case, weights is reshaped into
+    [D0, D1, ... DN, 1] to work with weight broadcasting rules.
+
+  Args:
+    features: The features dict that contains weights.
+    weight_column: The weight column. If not given, this method returns 1.
+    logits: logits Tensor.
+    allow_per_logit_weights: Boolean. Whether we allow weights along the logits
+      dimension, namely shape `[D0, D1, ... DN, logits_dimension]`.
+
+  Returns:
+    Validated and reshaped weights Tensor.
+  Raises:
+    ValueError: If the weights `Tensor` cannot be cast into float.
+  """
+  if allow_per_logit_weights:
+    err_msg = ('weights shape must be [D0, D1, ... DN], [D0, D1, ... DN, 1] or '
+               '[D0, D1, ... DN, logits_dimension]')
+  else:
+    err_msg = ('weights shape must be [D0, D1, ... DN] or [D0, D1, ... DN, 1]')
+  with ops.name_scope(
+      None, 'weights',
+      values=tuple(six.itervalues(features)) + (logits,)) as scope:
+    # Fetch the weights.
+    if weight_column is None:
+      return 1.
+    if isinstance(weight_column, six.string_types):
+      weight_column = tf.feature_column.numeric_column(
+          key=weight_column, shape=(1,))
+    if not isinstance(
+        weight_column,
+        (feature_column_lib.DenseColumn, feature_column._DenseColumn)):  # pylint: disable=protected-access
+      raise TypeError('Weight column must be either a string or _DenseColumn.'
+                      ' Given type: {}.'.format(type(weight_column)))
+    weights = weight_column._get_dense_tensor(  # pylint: disable=protected-access
+        feature_column._LazyBuilder(features))  # pylint: disable=protected-access
+    if not (weights.dtype.is_floating or weights.dtype.is_integer):
+      raise ValueError('Weight column should be castable to float. '
+                       'Given dtype: {}'.format(weights.dtype))
+    weights = tf.cast(weights, name='weights', dtype=tf.dtypes.float32)
+
+    # Validate the weights shape.
+    weights_shape = tf.compat.v1.shape(weights, name='weights_shape')
+    logits_shape = tf.compat.v1.shape(logits, name='logits_shape')
+    if (weights.shape.ndims is not None and logits.shape.ndims is not None and
+        weights.shape.ndims == logits.shape.ndims - 1):
+      assert_dimension = tf.compat.v1.debugging.assert_equal(
+          logits_shape[:-1],
+          weights_shape,
+          message=err_msg,
+          data=[
+              'logits_shape: ', logits_shape, 'weights_shape: ', weights_shape
+          ])
+      with tf.control_dependencies([assert_dimension]):
+        return tf.compat.v1.expand_dims(weights, -1, name=scope)
+    supported_weights_shape = tf.concat([logits_shape[:-1], [1]], axis=0)
+    if allow_per_logit_weights:
+      condition = tf.math.reduce_any([
+          tf.reduce_all(tf.math.equal(logits_shape, weights_shape)),
+          tf.reduce_all(tf.math.equal(supported_weights_shape, weights_shape))
+      ])
+      assert_dimension = tf.debugging.Assert(
+          condition=condition,
+          data=[
+              err_msg, 'logits_shape: ', logits_shape, 'weights_shape: ',
+              weights_shape
+          ])
+    else:
+      assert_dimension = tf.compat.v1.debugging.assert_equal(
+          supported_weights_shape,
+          weights_shape,
+          message=err_msg,
+          data=[
+              'logits_shape: ', logits_shape, 'weights_shape: ', weights_shape
+          ])
+    with tf.control_dependencies([assert_dimension]):
+      return tf.identity(weights, name=scope)
+
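+# Example (editor's sketch): for logits of shape [2, 3] and a weight column
+# named 'w' (a hypothetical feature key), per-example weights of shape [2] or
+# [2, 1] are accepted; with allow_per_logit_weights=True, shape [2, 3] would
+# also be accepted:
+#
+#   weights = _get_weights_and_check_match_logits(
+#       features={'w': tf.constant([1., 2.])},
+#       weight_column='w',
+#       logits=tf.zeros([2, 3]))
+#   # weights has shape [2, 1], ready to broadcast against the loss.
+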
+
+def _check_logits_final_dim(logits, expected_logits_dimension):
+  """Checks that logits shape is [D0, D1, ... DN, logits_dimension]."""
+  with ops.name_scope(None, 'logits', (logits,)) as scope:
+    logits = tf.cast(logits, dtype=tf.dtypes.float32)
+    logits_shape = tf.compat.v1.shape(logits)
+    assert_rank = tf.compat.v1.debugging.assert_rank_at_least(
+        logits,
+        2,
+        data=[logits_shape],
+        message='logits shape must be [D0, D1, ... DN, logits_dimension]')
+    with tf.control_dependencies([assert_rank]):
+      static_shape = logits.shape
+      if static_shape.ndims is not None and static_shape[-1] is not None:
+        if (isinstance(expected_logits_dimension, int) and
+            static_shape[-1] != expected_logits_dimension):
+          raise ValueError(
+              'logits shape must be [D0, D1, ... DN, logits_dimension=%s], '
+              'got %s.' % (expected_logits_dimension, static_shape))
+        return logits
+      assert_dimension = tf.compat.v1.debugging.assert_equal(
+          expected_logits_dimension,
+          logits_shape[-1],
+          data=[logits_shape],
+          message=('logits shape must be [D0, D1, ... DN, '
+                   'logits_dimension=%s]' % (expected_logits_dimension,)))
+      with tf.control_dependencies([assert_dimension]):
+        return tf.identity(logits, name=scope)
+
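+# Example (editor's sketch): when the final dimension is statically known,
+# the check happens at graph-construction time, so
+# _check_logits_final_dim(tf.zeros([4, 2]), 3) raises ValueError, while
+# _check_logits_final_dim(tf.zeros([4, 3]), 3) returns the logits cast to
+# float32.
+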
+
+def _validate_loss_fn_args(loss_fn):
+  """Validates loss_fn arguments.
+
+  Required arguments: labels, logits.
+  Optional arguments: features.
+
+  Args:
+    loss_fn: The loss function.
+
+  Raises:
+    ValueError: If the signature is unexpected.
+  """
+  loss_fn_args = function_utils.fn_args(loss_fn)
+  for required_arg in ['labels', 'logits']:
+    if required_arg not in loss_fn_args:
+      raise ValueError('loss_fn must contain argument: {}. '
+                       'Given arguments: {}'.format(required_arg, loss_fn_args))
+  invalid_args = list(set(loss_fn_args) - set(['labels', 'logits', 'features']))
+  if invalid_args:
+    raise ValueError('loss_fn has unexpected args: {}'.format(invalid_args))
+
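+# Example (editor's sketch): a valid custom `loss_fn` must accept `labels`
+# and `logits` and may optionally accept `features`; any other argument name
+# is rejected (`my_loss_fn` is hypothetical):
+#
+#   def my_loss_fn(labels, logits, features=None):
+#     del features  # Unused in this sketch.
+#     return tf.compat.v1.losses.huber_loss(
+#         labels, logits, reduction=tf.compat.v1.losses.Reduction.NONE)
+#
+#   _validate_loss_fn_args(my_loss_fn)  # Passes silently.
+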
+
+def _validate_n_classes(n_classes):
+  """Validates n_classes argument.
+
+  Required arguments: n_classes.
+
+  Args:
+    n_classes: The number of classes.
+
+  Raises:
+    ValueError: If n_classes is <= 2 and n_classes is a Python integer.
+  Returns:
+    n_classes in its original type.
+  """
+  if isinstance(n_classes, int) and (n_classes <= 2):
+    raise ValueError('n_classes must be > 2: %s.' % n_classes)
+
+  n_classes_as_tensor = ops.convert_to_tensor(n_classes)
+  assert_n_classes = tf.compat.v1.debugging.assert_greater(
+      n_classes_as_tensor, 2, message='n_classes must be greater than 2')
+  with tf.control_dependencies([assert_n_classes]):
+    tf.no_op()
+  # Return n_classes in its original type, so that any code
+  # using the accessor logits_dimension() has the original type.
+  return n_classes
+
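+# Example (editor's sketch): _validate_n_classes(3) returns 3 unchanged,
+# while _validate_n_classes(2) raises ValueError, since binary problems
+# should use the binary logistic head instead.
+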
+
+def _call_loss_fn(loss_fn, labels, logits, features, expected_loss_dim=1):
+  """Calls loss_fn and checks the returned shape.
+
+  Args:
+    loss_fn: The loss function.
+    labels: Processed labels Tensor.
+    logits: Logits Tensor of shape [D0, D1, ... DN, logits_dimension].
+    features: Features dict.
+    expected_loss_dim: The expected last dimension of loss Tensor.
+
+  Returns:
+    Loss Tensor with shape [D0, D1, ... DN, expected_loss_dim].
+  """
+  loss_fn_args = function_utils.fn_args(loss_fn)
+  kwargs = {}
+  if 'features' in loss_fn_args:
+    kwargs['features'] = features
+  with ops.name_scope(
+      None,
+      'call_loss_fn',
+      values=[labels, logits] + list(six.itervalues(features))):
+    unweighted_loss = loss_fn(labels=labels, logits=logits, **kwargs)
+    logits_shape = tf.compat.v1.shape(logits, name='logits_shape')
+    expected_loss_shape = tf.concat([logits_shape[:-1], [expected_loss_dim]],
+                                    axis=0,
+                                    name='expected_loss_shape')
+    loss_shape = tf.compat.v1.shape(unweighted_loss, name='loss_shape')
+    check_loss_shape_op = tf.debugging.Assert(
+        tf.reduce_all(tf.math.equal(loss_shape, expected_loss_shape)),
+        data=[
+            'loss_fn must return Tensor of shape '
+            '[D0, D1, ... DN, {}]. '.format(expected_loss_dim),
+            'logits_shape: ', logits_shape, 'loss_shape: ', loss_shape
+        ],
+        name='check_loss_shape')
+    with tf.control_dependencies([check_loss_shape_op]):
+      return tf.identity(unweighted_loss)
+
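+# Example (editor's sketch): with logits of shape [batch_size, n_classes] and
+# expected_loss_dim=1, the wrapped `loss_fn` must return shape
+# [batch_size, 1]; a per-example loss whose last dimension was squeezed needs
+# it restored (`my_loss_fn` is hypothetical):
+#
+#   def my_loss_fn(labels, logits):
+#     loss = tf.compat.v1.losses.sparse_softmax_cross_entropy(
+#         labels=labels, logits=logits,
+#         reduction=tf.compat.v1.losses.Reduction.NONE)
+#     return tf.compat.v1.expand_dims(loss, axis=-1)
+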
+
+def _indicator_labels_mean(labels, weights=None, name=None):
+  with ops.name_scope(name, 'labels_mean', (labels, weights)) as scope:
+    labels = tf.cast(labels, name='labels', dtype=tf.dtypes.float32)
+    if weights is not None:
+      weights = weights_broadcast_ops.broadcast_weights(weights, labels)
+    return tf.compat.v1.metrics.mean(labels, weights=weights, name=scope)
+
+
+def _all_class_ids(logits, n_classes):
+  batch_size = tf.compat.v1.shape(logits)[0]
+  class_id_list = tf.range(n_classes)
+  return tf.tile(
+      input=tf.compat.v1.expand_dims(input=class_id_list, axis=0),
+      multiples=[batch_size, 1])
+
+
+def _all_classes(logits, n_classes, label_vocabulary=None):
+  batch_size = tf.compat.v1.shape(logits)[0]
+  if label_vocabulary:
+    classes_list = label_vocabulary
+  else:
+    classes_list = string_ops.as_string(tf.range(n_classes))
+  return tf.tile(
+      input=tf.compat.v1.expand_dims(input=classes_list, axis=0),
+      multiples=[batch_size, 1])
+
+
+def _classification_output(scores, n_classes, label_vocabulary=None):
+  batch_size = tf.compat.v1.shape(scores)[0]
+  if label_vocabulary:
+    export_class_list = label_vocabulary
+  else:
+    export_class_list = string_ops.as_string(tf.range(n_classes))
+  export_output_classes = tf.tile(
+      input=tf.compat.v1.expand_dims(input=export_class_list, axis=0),
+      multiples=[batch_size, 1])
+  return export_output.ClassificationOutput(
+      scores=scores,
+      # `ClassificationOutput` requires string classes.
+      classes=export_output_classes)
+
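+# Example (editor's sketch): given per-row probabilities and a vocabulary,
+# the export output carries the class strings tiled per batch row:
+#
+#   output = _classification_output(
+#       scores=tf.constant([[0.1, 0.9], [0.8, 0.2]]),
+#       n_classes=2,
+#       label_vocabulary=['cat', 'dog'])
+#   # output.classes[i] == ['cat', 'dog'] for every row i.
+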
+
+def _accuracy_baseline(labels_mean):
+  """Return accuracy baseline based on labels mean.
+
+  This is the best the model could do by always predicting one class.
+
+  Args:
+    labels_mean: Tuple of value and update op.
+
+  Returns:
+    Tuple of value and update op.
+  """
+  with ops.name_scope(None, 'accuracy_baseline', labels_mean):
+    value, update_op = labels_mean
+    return (tf.math.maximum(value, 1. - value, name='value'),
+            tf.math.maximum(update_op, 1 - update_op, name='update_op'))
+
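+# Example (editor's sketch): if 30% of the (weighted) labels are positive,
+# always predicting the negative class is right 70% of the time, so the
+# baseline is max(0.3, 1. - 0.3) = 0.7.
+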
+
+def _predictions_mean(predictions, weights=None, name=None):
+  with ops.name_scope(name, 'predictions_mean',
+                      (predictions, weights)) as scope:
+    predictions = tf.cast(
+        predictions, name='predictions', dtype=tf.dtypes.float32)
+    if weights is not None:
+      weights = weights_broadcast_ops.broadcast_weights(weights, predictions)
+    return tf.compat.v1.metrics.mean(predictions, weights=weights, name=scope)
+
+
+def _auc(labels, predictions, weights=None, curve='ROC', name=None):
+  with ops.name_scope(name, 'auc', (predictions, labels, weights)) as scope:
+    predictions = tf.cast(
+        predictions, name='predictions', dtype=tf.dtypes.float32)
+    if weights is not None:
+      weights = weights_broadcast_ops.broadcast_weights(weights, predictions)
+    return tf.compat.v1.metrics.auc(
+        labels=labels,
+        predictions=predictions,
+        weights=weights,
+        curve=curve,
+        name=scope)
+
+
+def _accuracy_at_threshold(labels, predictions, weights, threshold, name=None):
+  with ops.name_scope(name, 'accuracy_at_%s' % threshold,
+                      (predictions, labels, weights, threshold)) as scope:
+    threshold_predictions = tf.compat.v1.to_float(
+        tf.math.greater_equal(predictions, threshold))
+    return tf.compat.v1.metrics.accuracy(
+        labels=labels,
+        predictions=threshold_predictions,
+        weights=weights,
+        name=scope)
+
+
+def _precision_at_threshold(labels, predictions, weights, threshold, name=None):
+  with ops.name_scope(name, 'precision_at_%s' % threshold,
+                      (predictions, labels, weights, threshold)) as scope:
+    precision_tensor, update_op = tf.compat.v1.metrics.precision_at_thresholds(
+        labels=labels,
+        predictions=predictions,
+        thresholds=(threshold,),
+        weights=weights,
+        name=scope)
+    return tf.compat.v1.squeeze(precision_tensor), tf.compat.v1.squeeze(
+        update_op)
+
+
+def _recall_at_threshold(labels, predictions, weights, threshold, name=None):
+  with ops.name_scope(name, 'recall_at_%s' % threshold,
+                      (predictions, labels, weights, threshold)) as scope:
+    recall_tensor, update_op = tf.compat.v1.metrics.recall_at_thresholds(
+        labels=labels,
+        predictions=predictions,
+        thresholds=(threshold,),
+        weights=weights,
+        name=scope)
+    return tf.compat.v1.squeeze(recall_tensor), tf.compat.v1.squeeze(
+        update_op)
+
+
+def _multi_class_head_with_softmax_cross_entropy_loss(
+    n_classes,
+    weight_column=None,
+    label_vocabulary=None,
+    loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+    loss_fn=None,
+    name=None):
+  """Creates a '_Head' for multi class classification.
+
+  The head expects `logits` with shape `[D0, D1, ... DN, n_classes]`.
+  In many applications, the shape is `[batch_size, n_classes]`.
+
+  `labels` must be a dense `Tensor` with shape matching `logits`, namely
+  `[D0, D1, ... DN, 1]`. If `label_vocabulary` is given, `labels` must be a
+  string `Tensor` with values from the vocabulary. If `label_vocabulary` is not
+  given, `labels` must be an integer `Tensor` with values specifying the class
+  index.
+
+  If `weight_column` is specified, weights must be of shape
+  `[D0, D1, ... DN]`, or `[D0, D1, ... DN, 1]`.
+
+  The loss is the weighted sum over the input dimensions. Namely, if the input
+  labels have shape `[batch_size, 1]`, the loss is the weighted sum over
+  `batch_size`.
+
+  Also supports custom `loss_fn`. `loss_fn` takes `(labels, logits)` or
+  `(labels, logits, features)` as arguments and returns unreduced loss with
+  shape `[D0, D1, ... DN, 1]`. `loss_fn` must support integer `labels` with
+  shape `[D0, D1, ... DN, 1]`. Namely, the head applies `label_vocabulary` to
+  the input labels before passing them to `loss_fn`.
+
+  Args:
+    n_classes: Number of classes, must be greater than 2 (for 2 classes, use
+      `_BinaryLogisticHeadWithSigmoidCrossEntropyLoss`).
+    weight_column: A string or a `_NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It is used to down weight or boost examples during training. It
+      will be multiplied by the loss of the example.
+    label_vocabulary: A list or tuple of strings representing possible label
+      values. If it is not given, labels are assumed to be already encoded as
+      integers within [0, n_classes). If given, labels must be of string type
+      and take values from `label_vocabulary`. Note that errors will be raised
+      if `label_vocabulary` is not provided but labels are strings.
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+      to reduce the training loss over the batch. Defaults to `SUM`.
+    loss_fn: Optional loss function.
+    name: name of the head. If provided, summary and metrics keys will be
+      suffixed by `"/" + name`. Also used as `name_scope` when creating ops.
+
+  Returns:
+    An instance of `_Head` for multi class classification.
+
+  Raises:
+    ValueError: If `n_classes`, `label_vocabulary` or `loss_reduction` is
+      invalid.
+  """
+  if label_vocabulary is not None and not isinstance(label_vocabulary,
+                                                     (list, tuple)):
+    raise ValueError(
+        'label_vocabulary should be a list or a tuple. Given type: {}'.format(
+            type(label_vocabulary)))
+  if (loss_reduction not in tf.compat.v1.losses.Reduction.all() or
+      loss_reduction == tf.compat.v1.losses.Reduction.NONE):
+    raise ValueError('Invalid loss_reduction: {}'.format(loss_reduction))
+  if loss_fn:
+    _validate_loss_fn_args(loss_fn)
+  return _MultiClassHeadWithSoftmaxCrossEntropyLoss(
+      n_classes=n_classes,
+      weight_column=weight_column,
+      label_vocabulary=label_vocabulary,
+      loss_reduction=loss_reduction,
+      loss_fn=loss_fn,
+      name=name)
+
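+# Example (editor's sketch): a 3-class head with a string vocabulary and
+# per-example weights read from a hypothetical feature 'w'; `my_logits` and
+# `my_optimizer` are hypothetical as well:
+#
+#   head = _multi_class_head_with_softmax_cross_entropy_loss(
+#       n_classes=3, weight_column='w', label_vocabulary=['a', 'b', 'c'])
+#   spec = head.create_estimator_spec(
+#       features=features, mode=mode, logits=my_logits, labels=labels,
+#       optimizer=my_optimizer)
+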
+
+class _MultiClassHeadWithSoftmaxCrossEntropyLoss(_Head):
+  """See `_multi_class_head_with_softmax_cross_entropy_loss`."""
+
+  def __init__(self,
+               n_classes,
+               weight_column=None,
+               label_vocabulary=None,
+               loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+               loss_fn=None,
+               name=None):
+    if n_classes is None:
+      raise ValueError('n_classes cannot be None')
+    self._n_classes = _validate_n_classes(n_classes)
+    self._weight_column = weight_column
+    self._label_vocabulary = label_vocabulary
+    self._loss_reduction = loss_reduction
+    self._loss_fn = loss_fn
+    self._name = name
+
+  @property
+  def name(self):
+    return self._name
+
+  @property
+  def logits_dimension(self):
+    return self._n_classes
+
+  def _eval_metric_ops(self, labels, class_ids, weights, unreduced_loss,
+                       regularization_loss):
+    """Returns the Eval metric ops."""
+    with ops.name_scope(
+        None, 'metrics',
+        (labels, class_ids, weights, unreduced_loss, regularization_loss)):
+      keys = metric_keys.MetricKeys
+      metric_ops = {
+          # Estimator already adds a metric for loss.
+          # TODO(xiejw): Any other metrics?
+          _summary_key(self._name, keys.LOSS_MEAN):
+              tf.compat.v1.metrics.mean(
+                  values=unreduced_loss, weights=weights, name=keys.LOSS_MEAN),
+          _summary_key(self._name, keys.ACCURACY):
+              tf.compat.v1.metrics.accuracy(
+                  labels=labels,
+                  predictions=class_ids,
+                  weights=weights,
+                  name=keys.ACCURACY),
+      }
+      if regularization_loss is not None:
+        metric_ops[_summary_key(self._name, keys.LOSS_REGULARIZATION)] = (
+            tf.compat.v1.metrics.mean(
+                values=regularization_loss, name=keys.LOSS_REGULARIZATION))
+    return metric_ops
+
+  def _label_ids(self, labels):
+    """Converts labels to integer id space."""
+    if self._label_vocabulary is None:
+      if not labels.dtype.is_integer:
+        raise ValueError(
+            'Labels dtype should be integer. Instead got {}.'.format(
+                labels.dtype))
+      label_ids = labels
+    else:
+      if labels.dtype != tf.dtypes.string:
+        raise ValueError('Labels dtype should be string if there is a '
+                         'vocabulary. Instead got {}'.format(labels.dtype))
+      label_ids = lookup_ops.index_table_from_tensor(
+          vocabulary_list=tuple(self._label_vocabulary),
+          name='class_id_lookup').lookup(labels)
+    return _assert_range(label_ids, self._n_classes)
+
+  def create_loss(self, features, mode, logits, labels):
+    """See `Head`."""
+    del mode  # Unused for this head.
+    logits = ops.convert_to_tensor(logits)
+    labels = _check_dense_labels_match_logits_and_reshape(
+        labels=labels, logits=logits, expected_labels_dimension=1)
+    label_ids = self._label_ids(labels)
+    if self._loss_fn:
+      unweighted_loss = _call_loss_fn(
+          loss_fn=self._loss_fn,
+          labels=label_ids,
+          logits=logits,
+          features=features,
+          expected_loss_dim=1)
+    else:
+      unweighted_loss = tf.compat.v1.losses.sparse_softmax_cross_entropy(
+          labels=label_ids,
+          logits=logits,
+          reduction=tf.compat.v1.losses.Reduction.NONE)
+      # Restore the squeezed dim, so unweighted_loss matches the weights shape.
+      unweighted_loss = tf.compat.v1.expand_dims(unweighted_loss, axis=-1)
+    weights = _get_weights_and_check_match_logits(
+        features=features, weight_column=self._weight_column, logits=logits)
+    training_loss = tf.compat.v1.losses.compute_weighted_loss(
+        unweighted_loss, weights=weights, reduction=self._loss_reduction)
+    return LossSpec(
+        training_loss=training_loss,
+        unreduced_loss=unweighted_loss,
+        weights=weights,
+        processed_labels=label_ids)
+
+  def _create_tpu_estimator_spec(self,
+                                 features,
+                                 mode,
+                                 logits,
+                                 labels=None,
+                                 optimizer=None,
+                                 train_op_fn=None,
+                                 regularization_losses=None):
+    """Returns a `model_fn._TPUEstimatorSpec`.
+
+    Args:
+      features: Input `dict` of `Tensor` or `SparseTensor` objects.
+      mode: Estimator's `ModeKeys`.
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, logits_dimension]`.
+        For many applications, the shape is `[batch_size, logits_dimension]`.
+      labels: Labels integer or string `Tensor` with shape matching `logits`,
+        namely `[D0, D1, ... DN, 1]` or `[D0, D1, ... DN]`. `labels` is a
+        required argument when `mode` equals `TRAIN` or `EVAL`.
+      optimizer: `Optimizer` instance to optimize the loss in TRAIN mode.
+        Namely, sets `train_op = optimizer.minimize(loss, global_step)`, which
+        updates variables and increments `global_step`.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns
+        `train_op`. Used if `optimizer` is `None`.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses. These losses are
+        usually expressed as a batch average, so for best results users need to
+        set `loss_reduction=SUM_OVER_BATCH_SIZE` when creating the head to avoid
+        scaling errors.
+
+    Returns:
+      A `model_fn._TPUEstimatorSpec` instance.
+    Raises:
+      ValueError: If both `train_op_fn` and `optimizer` are `None` in TRAIN
+        mode, or if both are set.
+    """
+    with ops.name_scope(self._name, 'head'):
+      logits = _check_logits_final_dim(logits, self.logits_dimension)
+
+      # Predict.
+      pred_keys = prediction_keys.PredictionKeys
+      with ops.name_scope(None, 'predictions', (logits,)):
+        all_class_ids = _all_class_ids(logits, self._n_classes)
+        all_classes = _all_classes(
+            logits, self._n_classes, label_vocabulary=self._label_vocabulary)
+        # class_ids's shape is [D0, D1, ... DN].
+        class_ids = tf.compat.v1.math.argmax(
+            logits, axis=-1, name=pred_keys.CLASS_IDS)
+        class_ids = tf.compat.v1.expand_dims(class_ids, axis=-1)
+        if self._label_vocabulary:
+          table = lookup_ops.index_to_string_table_from_tensor(
+              vocabulary_list=self._label_vocabulary,
+              name='class_string_lookup')
+          classes = table.lookup(class_ids)
+        else:
+          classes = tf.strings.as_string(class_ids, name='str_classes')
+
+        probabilities = tf.compat.v1.nn.softmax(
+            logits, name=pred_keys.PROBABILITIES)
+        predictions = {
+            pred_keys.LOGITS: logits,
+            pred_keys.PROBABILITIES: probabilities,
+            # `class_ids` was expanded to shape [batch_size, 1] above.
+            pred_keys.CLASS_IDS: class_ids,
+            pred_keys.CLASSES: classes,
+            pred_keys.ALL_CLASS_IDS: all_class_ids,
+            pred_keys.ALL_CLASSES: all_classes,
+        }
+      if mode == ModeKeys.PREDICT:
+        classifier_output = _classification_output(
+            scores=probabilities,
+            n_classes=self._n_classes,
+            label_vocabulary=self._label_vocabulary)
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.PREDICT,
+            predictions=predictions,
+            export_outputs={
+                _DEFAULT_SERVING_KEY: classifier_output,
+                _CLASSIFY_SERVING_KEY: classifier_output,
+                _PREDICT_SERVING_KEY: export_output.PredictOutput(predictions)
+            })
+
+      training_loss, unreduced_loss, weights, label_ids = self.create_loss(
+          features=features, mode=mode, logits=logits, labels=labels)
+      if regularization_losses:
+        regularization_loss = tf.math.add_n(regularization_losses)
+        regularized_training_loss = tf.math.add_n(
+            [training_loss, regularization_loss])
+      else:
+        regularization_loss = None
+        regularized_training_loss = training_loss
+      # Eval.
+      if mode == ModeKeys.EVAL:
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.EVAL,
+            predictions=predictions,
+            loss=regularized_training_loss,
+            eval_metrics=_create_eval_metrics_tuple(
+                self._eval_metric_ops, {
+                    'labels': label_ids,
+                    'class_ids': class_ids,
+                    'weights': weights,
+                    'unreduced_loss': unreduced_loss,
+                    'regularization_loss': regularization_loss
+                }))
+
+      # Train.
+      if optimizer is not None:
+        if train_op_fn is not None:
+          raise ValueError('train_op_fn and optimizer cannot both be set.')
+        train_op = optimizer.minimize(
+            regularized_training_loss,
+            global_step=tf.compat.v1.train.get_global_step())
+      elif train_op_fn is not None:
+        train_op = train_op_fn(regularized_training_loss)
+      else:
+        raise ValueError('train_op_fn and optimizer cannot both be None.')
+      train_op = _append_update_ops(train_op)
+      # Only summarize mean_loss for SUM reduction to preserve backwards
+      # compatibility. Otherwise skip it to avoid unnecessary computation.
+      if self._loss_reduction == tf.compat.v1.losses.Reduction.SUM:
+        example_weight_sum = tf.math.reduce_sum(
+            weights * tf.compat.v1.ones_like(unreduced_loss))
+        mean_loss = training_loss / example_weight_sum
+      else:
+        mean_loss = None
+    with ops.name_scope(''):
+      keys = metric_keys.MetricKeys
+      tf.compat.v1.summary.scalar(
+          _summary_key(self._name, keys.LOSS), regularized_training_loss)
+      if mean_loss is not None:
+        tf.compat.v1.summary.scalar(
+            _summary_key(self._name, keys.LOSS_MEAN), mean_loss)
+      if regularization_loss is not None:
+        tf.compat.v1.summary.scalar(
+            _summary_key(self._name, keys.LOSS_REGULARIZATION),
+            regularization_loss)
+    return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+        mode=ModeKeys.TRAIN,
+        predictions=predictions,
+        loss=regularized_training_loss,
+        train_op=train_op)
+
+
+def _binary_logistic_head_with_sigmoid_cross_entropy_loss(
+    weight_column=None,
+    thresholds=None,
+    label_vocabulary=None,
+    loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+    loss_fn=None,
+    name=None):
+  """Creates a `_Head` for single label binary classification.
+
+  This head uses `sigmoid_cross_entropy_with_logits` loss.
+
+  The head expects `logits` with shape `[D0, D1, ... DN, 1]`.
+  In many applications, the shape is `[batch_size, 1]`.
+
+  `labels` must be a dense `Tensor` with shape matching `logits`, namely
+  `[D0, D1, ... DN, 1]`. If `label_vocabulary` is given, `labels` must be a
+  string `Tensor` with values from the vocabulary. If `label_vocabulary` is not
+  given, `labels` must be a float `Tensor` with values in the interval
+  `[0, 1]`.
+
+  If `weight_column` is specified, weights must be of shape
+  `[D0, D1, ... DN]`, or `[D0, D1, ... DN, 1]`.
+
+  The loss is the weighted sum over the input dimensions. Namely, if the input
+  labels have shape `[batch_size, 1]`, the loss is the weighted sum over
+  `batch_size`.
+
+  Also supports custom `loss_fn`. `loss_fn` takes `(labels, logits)` or
+  `(labels, logits, features)` as arguments and returns unreduced loss with
+  shape `[D0, D1, ... DN, 1]`. `loss_fn` must support float `labels` with
+  shape `[D0, D1, ... DN, 1]`. Namely, the head applies `label_vocabulary` to
+  the input labels before passing them to `loss_fn`.
+
+  Args:
+    weight_column: A string or a `_NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It is used to down weight or boost examples during training. It
+      will be multiplied by the loss of the example.
+    thresholds: Iterable of floats in the range `(0, 1)`. For binary
+      classification metrics such as precision and recall, an eval metric is
+      generated for each threshold value. This threshold is applied to the
+      logistic values to determine the binary classification (i.e., above the
+      threshold is `true`, below is `false`).
+    label_vocabulary: A list or tuple of strings representing possible label
+      values. If it is not given, labels are assumed to be already encoded
+      within [0, 1]. If given, labels must be of string type and take values
+      from `label_vocabulary`. Note that errors will be raised if
+      `label_vocabulary` is not provided but labels are strings.
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+      to reduce the training loss over the batch. Defaults to `SUM`.
+    loss_fn: Optional loss function.
+    name: name of the head. If provided, summary and metrics keys will be
+      suffixed by `"/" + name`. Also used as `name_scope` when creating ops.
+
+  Returns:
+    An instance of `_Head` for binary classification.
+
+  Raises:
+    ValueError: If `thresholds` contains a value outside of `(0, 1)`.
+    ValueError: If `loss_reduction` is invalid.
+    TypeError: if `label_vocabulary` has invalid type.
+  """
+  thresholds = tuple(thresholds) if thresholds else tuple()
+  if label_vocabulary is not None and not isinstance(label_vocabulary,
+                                                     (list, tuple)):
+    raise TypeError(
+        'label_vocabulary should be a list or tuple. Given type: {}'.format(
+            type(label_vocabulary)))
+
+  for threshold in thresholds:
+    if (threshold <= 0.0) or (threshold >= 1.0):
+      raise ValueError('thresholds not in (0, 1): {}.'.format(thresholds))
+  if (loss_reduction not in tf.compat.v1.losses.Reduction.all() or
+      loss_reduction == tf.compat.v1.losses.Reduction.NONE):
+    raise ValueError('Invalid loss_reduction: {}'.format(loss_reduction))
+  if loss_fn:
+    _validate_loss_fn_args(loss_fn)
+  return _BinaryLogisticHeadWithSigmoidCrossEntropyLoss(
+      weight_column=weight_column,
+      thresholds=thresholds,
+      label_vocabulary=label_vocabulary,
+      loss_reduction=loss_reduction,
+      loss_fn=loss_fn,
+      name=name)
+
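+# Example (editor's sketch): a binary head that additionally reports
+# accuracy, precision and recall at thresholds 0.25 and 0.75; logits fed to
+# this head must have final dimension 1:
+#
+#   head = _binary_logistic_head_with_sigmoid_cross_entropy_loss(
+#       thresholds=[0.25, 0.75],
+#       label_vocabulary=['negative', 'positive'])
+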
+
+class _BinaryLogisticHeadWithSigmoidCrossEntropyLoss(_Head):
+  """See `_binary_logistic_head_with_sigmoid_cross_entropy_loss`."""
+
+  def __init__(self,
+               weight_column=None,
+               thresholds=None,
+               label_vocabulary=None,
+               loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+               loss_fn=None,
+               name=None):
+    self._weight_column = weight_column
+    self._thresholds = tuple(thresholds) if thresholds else tuple()
+    self._label_vocabulary = label_vocabulary
+    self._loss_reduction = loss_reduction
+    self._loss_fn = loss_fn
+    self._name = name
+
+  @property
+  def name(self):
+    return self._name
+
+  @property
+  def logits_dimension(self):
+    return 1
+
+  def _eval_metric_ops(self, labels, logits, logistic, class_ids, weights,
+                       unreduced_loss, regularization_loss):
+    with ops.name_scope(None, 'metrics',
+                        (labels, logits, logistic, class_ids, weights,
+                         unreduced_loss, regularization_loss)):
+      keys = metric_keys.MetricKeys
+      labels_mean = _indicator_labels_mean(
+          labels=labels, weights=weights, name=keys.LABEL_MEAN)
+      metric_ops = {
+          # Estimator already adds a metric for loss.
+          _summary_key(self._name, keys.LOSS_MEAN):
+              tf.compat.v1.metrics.mean(
+                  values=unreduced_loss, weights=weights, name=keys.LOSS_MEAN),
+          _summary_key(self._name, keys.ACCURACY):
+              tf.compat.v1.metrics.accuracy(
+                  labels=labels,
+                  predictions=class_ids,
+                  weights=weights,
+                  name=keys.ACCURACY),
+          _summary_key(self._name, keys.PRECISION):
+              tf.compat.v1.metrics.precision(
+                  labels=labels,
+                  predictions=class_ids,
+                  weights=weights,
+                  name=keys.PRECISION),
+          _summary_key(self._name, keys.RECALL):
+              tf.compat.v1.metrics.recall(
+                  labels=labels,
+                  predictions=class_ids,
+                  weights=weights,
+                  name=keys.RECALL),
+          _summary_key(self._name, keys.PREDICTION_MEAN):
+              _predictions_mean(
+                  predictions=logistic,
+                  weights=weights,
+                  name=keys.PREDICTION_MEAN),
+          _summary_key(self._name, keys.LABEL_MEAN):
+              labels_mean,
+          _summary_key(self._name, keys.ACCURACY_BASELINE):
+              _accuracy_baseline(labels_mean),
+          _summary_key(self._name, keys.AUC):
+              _auc(
+                  labels=labels,
+                  predictions=logistic,
+                  weights=weights,
+                  name=keys.AUC),
+          _summary_key(self._name, keys.AUC_PR):
+              _auc(
+                  labels=labels,
+                  predictions=logistic,
+                  weights=weights,
+                  curve='PR',
+                  name=keys.AUC_PR)
+      }
+      if regularization_loss is not None:
+        metric_ops[_summary_key(self._name, keys.LOSS_REGULARIZATION)] = (
+            tf.compat.v1.metrics.mean(
+                values=regularization_loss, name=keys.LOSS_REGULARIZATION))
+      for threshold in self._thresholds:
+        accuracy_key = keys.ACCURACY_AT_THRESHOLD % threshold
+        metric_ops[_summary_key(self._name,
+                                accuracy_key)] = _accuracy_at_threshold(
+                                    labels=labels,
+                                    predictions=logistic,
+                                    weights=weights,
+                                    threshold=threshold,
+                                    name=accuracy_key)
+        # Precision for positive examples.
+        precision_key = keys.PRECISION_AT_THRESHOLD % threshold
+        metric_ops[_summary_key(self._name,
+                                precision_key)] = _precision_at_threshold(
+                                    labels=labels,
+                                    predictions=logistic,
+                                    weights=weights,
+                                    threshold=threshold,
+                                    name=precision_key)
+        # Recall for positive examples.
+        recall_key = keys.RECALL_AT_THRESHOLD % threshold
+        metric_ops[_summary_key(self._name, recall_key)] = _recall_at_threshold(
+            labels=labels,
+            predictions=logistic,
+            weights=weights,
+            threshold=threshold,
+            name=recall_key)
+      return metric_ops
+
+  def create_loss(self, features, mode, logits, labels):
+    """See `Head`."""
+    del mode  # Unused for this head.
+    logits = ops.convert_to_tensor(logits)
+    labels = _check_dense_labels_match_logits_and_reshape(
+        labels=labels, logits=logits, expected_labels_dimension=1)
+    if self._label_vocabulary is not None:
+      labels = lookup_ops.index_table_from_tensor(
+          vocabulary_list=tuple(self._label_vocabulary),
+          name='class_id_lookup').lookup(labels)
+    labels = tf.cast(labels, dtype=tf.dtypes.float32)
+    labels = _assert_range(labels, n_classes=2)
+    if self._loss_fn:
+      unweighted_loss = _call_loss_fn(
+          loss_fn=self._loss_fn,
+          labels=labels,
+          logits=logits,
+          features=features,
+          expected_loss_dim=1)
+    else:
+      unweighted_loss = tf.compat.v1.nn.sigmoid_cross_entropy_with_logits(
+          labels=labels, logits=logits)
+    weights = _get_weights_and_check_match_logits(
+        features=features, weight_column=self._weight_column, logits=logits)
+    training_loss = tf.compat.v1.losses.compute_weighted_loss(
+        unweighted_loss, weights=weights, reduction=self._loss_reduction)
+    return LossSpec(
+        training_loss=training_loss,
+        unreduced_loss=unweighted_loss,
+        weights=weights,
+        processed_labels=labels)
+
+  def _create_tpu_estimator_spec(self,
+                                 features,
+                                 mode,
+                                 logits,
+                                 labels=None,
+                                 optimizer=None,
+                                 train_op_fn=None,
+                                 regularization_losses=None):
+    """Returns an `EstimatorSpec`.
+
+    Args:
+      features: Input `dict` of `Tensor` or `SparseTensor` objects.
+      mode: Estimator's `ModeKeys`.
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, 1]`. For many
+        applications, the shape is `[batch_size, 1]`.
+      labels: Labels integer or string `Tensor` with shape matching `logits`,
+        namely `[D0, D1, ... DN, 1]` or `[D0, D1, ... DN]`. `labels` is a
+        required argument when `mode` equals `TRAIN` or `EVAL`.
+      optimizer: `Optimizer` instance to optimize the loss in TRAIN mode.
+        Namely, sets `train_op = optimizer.minimize(loss, global_step)`, which
+        updates variables and increments `global_step`.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns
+        `train_op`. Used if `optimizer` is `None`.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses. These losses are
+        usually expressed as a batch average, so for best results users need to
+        set `loss_reduction=SUM_OVER_BATCH_SIZE` when creating the head to avoid
+        scaling errors.
+
+    Returns:
+      A `model_fn._TPUEstimatorSpec` instance.
+    Raises:
+      ValueError: If both `train_op_fn` and `optimizer` are `None` in TRAIN
+        mode, or if both are set.
+    """
+    # Predict.
+    with ops.name_scope(self._name, 'head'):
+      with ops.name_scope(None, 'predictions', (logits,)):
+        pred_keys = prediction_keys.PredictionKeys
+        logits = _check_logits_final_dim(logits, self.logits_dimension)
+        logistic = tf.math.sigmoid(logits, name=pred_keys.LOGISTIC)
+        two_class_logits = tf.concat((tf.compat.v1.zeros_like(logits), logits),
+                                     axis=-1,
+                                     name='two_class_logits')
+        probabilities = tf.compat.v1.nn.softmax(
+            two_class_logits, name=pred_keys.PROBABILITIES)
+        class_ids = tf.compat.v1.math.argmax(
+            two_class_logits, axis=-1, name=pred_keys.CLASS_IDS)
+        class_ids = tf.compat.v1.expand_dims(class_ids, axis=-1)
+        all_class_ids = _all_class_ids(logits, n_classes=2)
+        all_classes = _all_classes(
+            logits, n_classes=2, label_vocabulary=self._label_vocabulary)
+
+        if self._label_vocabulary:
+          table = lookup_ops.index_to_string_table_from_tensor(
+              vocabulary_list=self._label_vocabulary,
+              name='class_string_lookup')
+          classes = table.lookup(class_ids)
+        else:
+          classes = string_ops.as_string(class_ids, name='str_classes')
+        predictions = {
+            pred_keys.LOGITS: logits,
+            pred_keys.LOGISTIC: logistic,
+            pred_keys.PROBABILITIES: probabilities,
+            pred_keys.CLASS_IDS: class_ids,
+            pred_keys.CLASSES: classes,
+            pred_keys.ALL_CLASS_IDS: all_class_ids,
+            pred_keys.ALL_CLASSES: all_classes,
+        }
+      if mode == ModeKeys.PREDICT:
+        classifier_output = _classification_output(
+            scores=probabilities,
+            n_classes=2,
+            label_vocabulary=self._label_vocabulary)
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.PREDICT,
+            predictions=predictions,
+            export_outputs={
+                _DEFAULT_SERVING_KEY: classifier_output,
+                _CLASSIFY_SERVING_KEY: classifier_output,
+                _REGRESS_SERVING_KEY: export_output.RegressionOutput(
+                    value=logistic),
+                _PREDICT_SERVING_KEY: export_output.PredictOutput(predictions)
+            })
+
+      (training_loss, unreduced_loss, weights, processed_labels) = (
+          self.create_loss(
+              features=features, mode=mode, logits=logits, labels=labels))
+      if regularization_losses:
+        regularization_loss = tf.math.add_n(regularization_losses)
+        regularized_training_loss = tf.math.add_n(
+            [training_loss, regularization_loss])
+      else:
+        regularization_loss = None
+        regularized_training_loss = training_loss
+
+      # Eval.
+      if mode == ModeKeys.EVAL:
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.EVAL,
+            predictions=predictions,
+            loss=regularized_training_loss,
+            eval_metrics=_create_eval_metrics_tuple(
+                self._eval_metric_ops, {
+                    'labels': processed_labels,
+                    'logits': logits,
+                    'logistic': logistic,
+                    'class_ids': class_ids,
+                    'weights': weights,
+                    'unreduced_loss': unreduced_loss,
+                    'regularization_loss': regularization_loss
+                }))
+
+      # Train.
+      if optimizer is not None:
+        if train_op_fn is not None:
+          raise ValueError('train_op_fn and optimizer cannot both be set.')
+        train_op = optimizer.minimize(
+            regularized_training_loss,
+            global_step=tf.compat.v1.train.get_global_step())
+      elif train_op_fn is not None:
+        train_op = train_op_fn(regularized_training_loss)
+      else:
+        raise ValueError('train_op_fn and optimizer cannot both be None.')
+      train_op = _append_update_ops(train_op)
+      # Only summarize mean_loss for SUM reduction to preserve backwards
+      # compatibility. Otherwise skip it to avoid unnecessary computation.
+      if self._loss_reduction == tf.compat.v1.losses.Reduction.SUM:
+        example_weight_sum = tf.math.reduce_sum(
+            weights * tf.compat.v1.ones_like(unreduced_loss))
+        mean_loss = training_loss / example_weight_sum
+      else:
+        mean_loss = None
+    with ops.name_scope(''):
+      keys = metric_keys.MetricKeys
+      tf.compat.v1.summary.scalar(
+          _summary_key(self._name, keys.LOSS), regularized_training_loss)
+      if mean_loss is not None:
+        tf.compat.v1.summary.scalar(
+            _summary_key(self._name, keys.LOSS_MEAN), mean_loss)
+      if regularization_loss is not None:
+        tf.compat.v1.summary.scalar(
+            _summary_key(self._name, keys.LOSS_REGULARIZATION),
+            regularization_loss)
+    return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+        mode=ModeKeys.TRAIN,
+        predictions=predictions,
+        loss=regularized_training_loss,
+        train_op=train_op)
+
+
+def _regression_head(weight_column=None,
+                     label_dimension=1,
+                     loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+                     loss_fn=None,
+                     inverse_link_fn=None,
+                     name=None):
+  """Creates a `_Head` for regression using the `mean_squared_error` loss.
+
+  The loss is the weighted sum over all input dimensions. Namely, if the input
+  labels have shape `[batch_size, label_dimension]`, the loss is the weighted
+  sum over both `batch_size` and `label_dimension`.
+
+  The head expects `logits` with shape `[D0, D1, ... DN, label_dimension]`.
+  In many applications, the shape is `[batch_size, label_dimension]`.
+
+  The `labels` shape must match `logits`, namely
+  `[D0, D1, ... DN, label_dimension]`. If `label_dimension=1`, shape
+  `[D0, D1, ... DN]` is also supported.
+
+  If `weight_column` is specified, weights must be of shape
+  `[D0, D1, ... DN]`, `[D0, D1, ... DN, 1]` or
+  `[D0, D1, ... DN, label_dimension]`.
+
+  Supports custom `loss_fn`. `loss_fn` takes `(labels, logits)` or
+  `(labels, logits, features)` as arguments and returns unreduced loss with
+  shape `[D0, D1, ... DN, label_dimension]`.
+
+  Also supports custom `inverse_link_fn`, also known as 'mean function'.
+  `inverse_link_fn` takes `logits` as argument and returns predicted values.
+  This function is the inverse of the link function defined in
+  https://en.wikipedia.org/wiki/Generalized_linear_model#Link_function
+  Namely, for poisson regression, set `inverse_link_fn=tf.exp`.
+
+  Args:
+    weight_column: A string or a `_NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It is used to down weight or boost examples during training. It
+      will be multiplied by the loss of the example.
+    label_dimension: Number of regression labels per example. This is the size
+      of the last dimension of the labels `Tensor` (typically, this has shape
+      `[batch_size, label_dimension]`).
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+      to reduce the training loss over the batch. Defaults to `SUM`.
+    loss_fn: Optional loss function. Defaults to `mean_squared_error`.
+    inverse_link_fn: Optional inverse link function, also known as 'mean
+      function'. Defaults to identity.
+    name: name of the head. If provided, summary and metrics keys will be
+      suffixed by `"/" + name`. Also used as `name_scope` when creating ops.
+
+  Returns:
+    An instance of `_Head` for linear regression.
+
+  Raises:
+    ValueError: If `label_dimension` or `loss_reduction` is invalid.
+  """
+  if (loss_reduction not in tf.compat.v1.losses.Reduction.all() or
+      loss_reduction == tf.compat.v1.losses.Reduction.NONE):
+    raise ValueError('Invalid loss_reduction: {}'.format(loss_reduction))
+  if loss_fn:
+    _validate_loss_fn_args(loss_fn)
+  return _RegressionHeadWithMeanSquaredErrorLoss(
+      weight_column=weight_column,
+      label_dimension=label_dimension,
+      loss_reduction=loss_reduction,
+      loss_fn=loss_fn,
+      inverse_link_fn=inverse_link_fn,
+      name=name)
+
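+# Example (editor's sketch): a Poisson-style regression head where the model
+# emits log-rates as logits and `inverse_link_fn=tf.exp` maps them back to
+# predicted rates (`my_poisson_loss` is a hypothetical custom loss returning
+# unreduced loss of shape [D0, ... DN, label_dimension]):
+#
+#   head = _regression_head(
+#       label_dimension=1,
+#       inverse_link_fn=tf.exp,
+#       loss_fn=my_poisson_loss)
+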
+
+class _RegressionHeadWithMeanSquaredErrorLoss(_Head):
+  """`Head` for regression using the mean squared loss."""
+
+  def __init__(self,
+               label_dimension,
+               weight_column=None,
+               loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+               loss_fn=None,
+               inverse_link_fn=None,
+               name=None):
+    """`Head` for regression."""
+    if label_dimension < 1:
+      raise ValueError('Invalid label_dimension %s.' % label_dimension)
+    self._logits_dimension = label_dimension
+    self._weight_column = weight_column
+    self._loss_reduction = loss_reduction
+    self._loss_fn = loss_fn
+    self._inverse_link_fn = inverse_link_fn
+    self._name = name
+
+  @property
+  def name(self):
+    return self._name
+
+  @property
+  def logits_dimension(self):
+    return self._logits_dimension
+
+  def create_loss(self, features, mode, logits, labels):
+    """See `Head`."""
+    del mode  # Unused for this head.
+    logits = ops.convert_to_tensor(logits)
+    labels = _check_dense_labels_match_logits_and_reshape(
+        labels=labels,
+        logits=logits,
+        expected_labels_dimension=self._logits_dimension)
+    labels = tf.cast(labels, dtype=tf.dtypes.float32)
+    if self._loss_fn:
+      unweighted_loss = _call_loss_fn(
+          loss_fn=self._loss_fn,
+          labels=labels,
+          logits=logits,
+          features=features,
+          expected_loss_dim=self._logits_dimension)
+    else:
+      unweighted_loss = tf.compat.v1.losses.mean_squared_error(
+          labels=labels,
+          predictions=logits,
+          reduction=tf.compat.v1.losses.Reduction.NONE)
+    weights = _get_weights_and_check_match_logits(
+        features=features,
+        weight_column=self._weight_column,
+        logits=logits,
+        allow_per_logit_weights=True)
+    training_loss = tf.compat.v1.losses.compute_weighted_loss(
+        unweighted_loss, weights=weights, reduction=self._loss_reduction)
+    return LossSpec(
+        training_loss=training_loss,
+        unreduced_loss=unweighted_loss,
+        weights=weights,
+        processed_labels=labels)
+
+  def _eval_metric_ops(self, predicted_value, labels, weights, unreduced_loss,
+                       regularization_loss):
+    """Returns the Eval metric ops."""
+    keys = metric_keys.MetricKeys
+    # Estimator already adds a metric for loss.
+    eval_metric_ops = {
+        _summary_key(self._name, keys.LOSS_MEAN):
+            tf.compat.v1.metrics.mean(values=unreduced_loss, weights=weights),
+        _summary_key(self._name, keys.PREDICTION_MEAN):
+            _predictions_mean(
+                predictions=predicted_value,
+                weights=weights,
+                name=keys.PREDICTION_MEAN),
+        _summary_key(self._name, keys.LABEL_MEAN):
+            tf.compat.v1.metrics.mean(values=labels, weights=weights)
+    }
+    if regularization_loss is not None:
+      regularization_loss_key = _summary_key(self._name,
+                                             keys.LOSS_REGULARIZATION)
+      eval_metric_ops[regularization_loss_key] = tf.compat.v1.metrics.mean(
+          values=regularization_loss, name=keys.LOSS_REGULARIZATION)
+    return eval_metric_ops
+
+  def _create_tpu_estimator_spec(self,
+                                 features,
+                                 mode,
+                                 logits,
+                                 labels=None,
+                                 optimizer=None,
+                                 train_op_fn=None,
+                                 regularization_losses=None):
+    """Returns an `EstimatorSpec`.
+
+    Args:
+      features: Input `dict` of `Tensor` or `SparseTensor` objects.
+      mode: Estimator's `ModeKeys`.
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, logits_dimension]`.
+        For many applications, the shape is `[batch_size, logits_dimension]`.
+      labels: Labels `Tensor` with shape matching `logits`, namely `[D0, D1, ...
+        DN, logits_dimension]`. When `logits_dimension=1`, shape `[D0, D1, ...
+        DN]` is also supported. `labels` is a required argument when `mode`
+        equals `TRAIN` or `EVAL`.
+      optimizer: `Optimizer` instance to optimize the loss in TRAIN mode.
+        Namely, sets `train_op = optimizer.minimize(loss, global_step)`, which
+        updates variables and increments `global_step`.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns
+        `train_op`. Used if `optimizer` is `None`.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses. These losses are
+        usually expressed as a batch average, so for best results users need to
+        set `loss_reduction=SUM_OVER_BATCH_SIZE` when creating the head to avoid
+        scaling errors.
+
+    Returns:
+      A `model_fn._TPUEstimatorSpec` instance.
+    Raises:
+      ValueError: If both `train_op_fn` and `optimizer` are `None` in TRAIN
+        mode, or if both are set.
+    """
+    # Predict.
+    with ops.name_scope(self._name, 'head'):
+      logits = _check_logits_final_dim(logits, self._logits_dimension)
+      if self._inverse_link_fn:
+        predicted_value = self._inverse_link_fn(logits)
+        predictions = {
+            prediction_keys.PredictionKeys.PREDICTIONS: predicted_value,
+            prediction_keys.PredictionKeys.LOGITS: logits,
+        }
+      else:
+        predicted_value = logits
+        predictions = {
+            prediction_keys.PredictionKeys.PREDICTIONS: predicted_value
+        }
+      if mode == ModeKeys.PREDICT:
+        regression_output = export_output.RegressionOutput(
+            value=predicted_value)
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.PREDICT,
+            predictions=predictions,
+            export_outputs={
+                _DEFAULT_SERVING_KEY: regression_output,
+                _REGRESS_SERVING_KEY: regression_output,
+                _PREDICT_SERVING_KEY: export_output.PredictOutput(predictions)
+            })
+
+      training_loss, unreduced_loss, weights, _ = self.create_loss(
+          features=features, mode=mode, logits=logits, labels=labels)
+      if regularization_losses:
+        regularization_loss = tf.math.add_n(regularization_losses)
+        regularized_training_loss = tf.math.add_n(
+            [training_loss, regularization_loss])
+      else:
+        regularization_loss = None
+        regularized_training_loss = training_loss
+
+      # Eval.
+      if mode == ModeKeys.EVAL:
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.EVAL,
+            predictions=predictions,
+            loss=regularized_training_loss,
+            eval_metrics=_create_eval_metrics_tuple(
+                self._eval_metric_ops, {
+                    'predicted_value': predicted_value,
+                    'labels': labels,
+                    'weights': weights,
+                    'unreduced_loss': unreduced_loss,
+                    'regularization_loss': regularization_loss,
+                }))
+
+      # Train.
+      if optimizer is not None:
+        if train_op_fn is not None:
+          raise ValueError('train_op_fn and optimizer cannot both be set.')
+        train_op = optimizer.minimize(
+            regularized_training_loss,
+            global_step=tf.compat.v1.train.get_global_step())
+      elif train_op_fn is not None:
+        train_op = train_op_fn(regularized_training_loss)
+      else:
+        raise ValueError('train_op_fn and optimizer cannot both be None.')
+      train_op = _append_update_ops(train_op)
+      # Only summarize mean_loss for SUM reduction to preserve backwards
+      # compatibility. Otherwise skip it to avoid unnecessary computation.
+      if self._loss_reduction == tf.compat.v1.losses.Reduction.SUM:
+        example_weight_sum = tf.math.reduce_sum(
+            weights * tf.compat.v1.ones_like(unreduced_loss))
+        mean_loss = training_loss / example_weight_sum
+      else:
+        mean_loss = None
+    with ops.name_scope(''):
+      keys = metric_keys.MetricKeys
+      tf.compat.v1.summary.scalar(
+          _summary_key(self._name, keys.LOSS), regularized_training_loss)
+      if mean_loss is not None:
+        tf.compat.v1.summary.scalar(
+            _summary_key(self._name, keys.LOSS_MEAN), mean_loss)
+      if regularization_loss is not None:
+        tf.compat.v1.summary.scalar(
+            _summary_key(self._name, keys.LOSS_REGULARIZATION),
+            regularization_loss)
+    return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+        mode=ModeKeys.TRAIN,
+        predictions=predictions,
+        loss=regularized_training_loss,
+        train_op=train_op)
+
+
+def _append_update_ops(train_op):
+  """Returns `train_op` appending `UPDATE_OPS` collection if present."""
+  update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.UPDATE_OPS)
+  if update_ops:
+    return tf.group(train_op, *update_ops)
+  return train_op
+
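+# Illustrative usage sketch (assumed setup, for exposition only): layers such
+# as tf.compat.v1.layers.batch_normalization register their moving-average
+# updates in the UPDATE_OPS collection, so grouping them with the train op
+# ensures they run on every training step:
+#
+#   train_op = optimizer.minimize(loss)
+#   train_op = _append_update_ops(train_op)  # also runs pending UPDATE_OPS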
+
+def _assert_range(labels, n_classes, message=None):
+  with ops.name_scope(None, 'assert_range', (labels,)):
+    assert_less = tf.compat.v1.debugging.assert_less_equal(
+        labels,
+        ops.convert_to_tensor(n_classes - 1, dtype=labels.dtype),
+        message=message or 'Labels must be <= n_classes - 1')
+    assert_greater = tf.compat.v1.debugging.assert_non_negative(
+        labels, message=message or 'Labels must be >= 0')
+    with tf.control_dependencies((assert_less, assert_greater)):
+      return tf.identity(labels)
+
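+# Illustrative usage (assumed values, for exposition only): with n_classes=3,
+# _assert_range(tf.constant([0, 2, 1]), 3) passes the labels through
+# unchanged, while _assert_range(tf.constant([0, 3, 1]), 3) fails at run time
+# with 'Labels must be <= n_classes - 1'.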
+
+def _binary_logistic_or_multi_class_head(n_classes, weight_column,
+                                         label_vocabulary, loss_reduction):
+  """Creates either binary or multi-class head.
+
+  Args:
+    n_classes: Number of label classes.
+    weight_column: A string or a `_NumericColumn` created by
+      `tf.feature_column.numeric_column` defining the feature column that
+      represents weights. It is used to down-weight or boost examples during
+      training, and it is multiplied by the loss of the example. If it is a
+      string, it is used as a key to fetch the weight tensor from `features`.
+      If it is a `_NumericColumn`, the raw tensor is fetched by the key
+      `weight_column.key`, then `weight_column.normalizer_fn` is applied to it
+      to get the weight tensor.
+    label_vocabulary: A list of strings representing possible label values. If
+      given, labels must be of string type and have a value in
+      `label_vocabulary`. If it is not given, labels must already be encoded
+      as an integer or float within [0, 1] for `n_classes=2`, or as integer
+      values in {0, 1, ..., n_classes-1} for `n_classes>2`. An error will be
+      raised if the vocabulary is not provided and labels are strings.
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+      to reduce the training loss over the batch. Defaults to `SUM`.
+
+  Returns:
+    `head._Head` instance.
+  """
+  if n_classes == 2:
+    head = _binary_logistic_head_with_sigmoid_cross_entropy_loss(
+        weight_column=weight_column,
+        label_vocabulary=label_vocabulary,
+        loss_reduction=loss_reduction)
+  else:
+    head = _multi_class_head_with_softmax_cross_entropy_loss(
+        n_classes,
+        weight_column=weight_column,
+        label_vocabulary=label_vocabulary,
+        loss_reduction=loss_reduction)
+  return head
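+
+
+# Illustrative usage (assumed arguments, for exposition only):
+#
+#   head = _binary_logistic_or_multi_class_head(
+#       n_classes=3, weight_column=None, label_vocabulary=None,
+#       loss_reduction=tf.compat.v1.losses.Reduction.SUM)
+#   # n_classes == 2 would return the binary logistic head instead.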
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/kmeans.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/kmeans.py
new file mode 100644
index 00000000..4b4bd5cc
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/kmeans.py
@@ -0,0 +1,479 @@
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""A canned Estimator for k-means clustering."""
+
+# TODO(ccolby): Move clustering_ops.py into this file and streamline the code.
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import time
+
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+from tensorflow.python.ops import clustering_ops
+from tensorflow.python.ops import control_flow_ops
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator import model_fn as model_fn_lib
+from tensorflow_estimator.python.estimator.export import export_output
+
+
+class _LossRelativeChangeHook(tf.compat.v1.train.SessionRunHook):
+  """Stops when the change in loss goes below a tolerance."""
+
+  def __init__(self, loss_tensor, tolerance):
+    """Creates a _LossRelativeChangeHook.
+
+    Args:
+      loss_tensor: A scalar tensor of the loss value.
+      tolerance: A relative tolerance of loss change between iterations.
+    """
+    self._loss_tensor = loss_tensor
+    self._tolerance = tolerance
+    self._prev_loss = None
+
+  def before_run(self, run_context):
+    del run_context  # unused
+    return tf.compat.v1.train.SessionRunArgs(self._loss_tensor)
+
+  def after_run(self, run_context, run_values):
+    loss = run_values.results
+    assert loss is not None
+    if self._prev_loss:
+      relative_change = (
+          abs(loss - self._prev_loss) / (1 + abs(self._prev_loss)))
+      if relative_change < self._tolerance:
+        run_context.request_stop()
+    self._prev_loss = loss
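+
+  # Illustrative check of the stopping rule (assumed values, for exposition
+  # only): with _prev_loss=100.0 and loss=99.9, relative_change is
+  # 0.1 / 101.0 (about 0.00099), so the hook requests a stop whenever
+  # `tolerance` exceeds roughly 1e-3.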
+
+
+class _InitializeClustersHook(tf.compat.v1.train.SessionRunHook):
+  """Initializes the cluster centers.
+
+  The chief repeatedly invokes an initialization op until all cluster centers
+  are initialized. The workers wait for the initialization phase to complete.
+  """
+
+  def __init__(self, init_op, is_initialized_var, is_chief):
+    """Creates an _InitializeClustersHook.
+
+    Args:
+      init_op: An op that, when run, will choose some initial cluster centers.
+        This op may need to be run multiple times to choose all the centers.
+      is_initialized_var: A boolean variable reporting whether all initial
+        centers have been chosen.
+      is_chief: A boolean specifying whether this task is the chief.
+    """
+    self._init_op = init_op
+    self._is_initialized_var = is_initialized_var
+    self._is_chief = is_chief
+
+  def after_create_session(self, session, coord):
+    del coord  # unused
+    assert self._init_op.graph is tf.compat.v1.get_default_graph()
+    assert self._is_initialized_var.graph is self._init_op.graph
+    while True:
+      try:
+        if session.run(self._is_initialized_var):
+          break
+        elif self._is_chief:
+          session.run(self._init_op)
+        else:
+          time.sleep(1)
+      except RuntimeError as e:
+        tf.compat.v1.logging.info(e)
+
+
+def _parse_features_if_necessary(features, feature_columns):
+  """Helper function to convert the input points into a usable format.
+
+  Args:
+    features: The input features.
+    feature_columns: An optional iterable containing all the feature columns
+      used by the model. All items in the set should be feature column instances
+      that can be passed to `tf.feature_column.input_layer`. If this is None,
+      all features will be used.
+
+  Returns:
+    If `features` is a dict of `k` features (optionally filtered by
+    `feature_columns`), each of which is a vector of `n` scalars, the return
+    value is a Tensor of shape `(n, k)` representing `n` input points, where the
+    items in the `k` dimension are sorted lexicographically by `features` key.
+    If `features` is not a dict, it is returned unmodified.
+  """
+  if not isinstance(features, dict):
+    return features
+
+  if feature_columns:
+    return tf.compat.v1.feature_column.input_layer(features, feature_columns)
+
+  keys = sorted(features.keys())
+  with ops.colocate_with(features[keys[0]]):
+    return tf.concat([features[k] for k in keys], axis=1)
+
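+# Illustrative example (assumed inputs, for exposition only): for
+#   features = {'b': tf.constant([[1.], [2.]]),
+#               'a': tf.constant([[3.], [4.]])}
+# and feature_columns=None, the keys are sorted lexicographically ('a', 'b'),
+# so the returned points are [[3., 1.], [4., 2.]], i.e. shape (n=2, k=2).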
+
+class _ModelFn(object):
+  """Model function for the estimator."""
+
+  def __init__(self, num_clusters, initial_clusters, distance_metric, seed,
+               use_mini_batch, mini_batch_steps_per_iteration,
+               kmeans_plus_plus_num_retries, relative_tolerance,
+               feature_columns):
+    self._num_clusters = num_clusters
+    self._initial_clusters = initial_clusters
+    self._distance_metric = distance_metric
+    self._seed = seed
+    self._use_mini_batch = use_mini_batch
+    self._mini_batch_steps_per_iteration = mini_batch_steps_per_iteration
+    self._kmeans_plus_plus_num_retries = kmeans_plus_plus_num_retries
+    self._relative_tolerance = relative_tolerance
+    self._feature_columns = feature_columns
+
+  def model_fn(self, features, mode, config):
+    """Model function for the estimator.
+
+    Note that this does not take a `labels` arg. This works, but `input_fn` must
+    return either `features` or, equivalently, `(features, None)`.
+
+    Args:
+      features: The input points. See `tf.estimator.Estimator`.
+      mode: See `tf.estimator.Estimator`.
+      config: See `tf.estimator.Estimator`.
+
+    Returns:
+      A `tf.estimator.EstimatorSpec` (see `tf.estimator.Estimator`) specifying
+      this behavior:
+        * `train_op`: Execute one mini-batch or full-batch run of Lloyd's
+             algorithm.
+        * `loss`: The sum of the squared distances from each input point to its
+             closest center.
+        * `eval_metric_ops`: Maps `SCORE` to `loss`.
+        * `predictions`: Maps `ALL_DISTANCES` to the distance from each input
+             point to each cluster center; maps `CLUSTER_INDEX` to the index of
+             the closest cluster center for each input point.
+    """
+    # input_points is a single Tensor. Therefore, the sharding functionality
+    # in clustering_ops is unused, and some of the values below are lists of a
+    # single item.
+    input_points = _parse_features_if_necessary(features, self._feature_columns)
+
+    # Let N = the number of input_points.
+    # all_distances: A list of one matrix of shape (N, num_clusters). Each value
+    #   is the distance from an input point to a cluster center.
+    # model_predictions: A list of one vector of shape (N). Each value is the
+    #   cluster id of an input point.
+    # losses: Similar to model_predictions but provides the distance from
+    #   each input point to its closest cluster center.
+    # is_initialized: scalar indicating whether the initial cluster centers
+    #   have been chosen; see init_op.
+    # init_op: an op to choose the initial cluster centers. A single worker
+    #   repeatedly executes init_op until is_initialized becomes True.
+    # training_op: an op that runs an iteration of training, either an entire
+    #   Lloyd iteration or a mini-batch of a Lloyd iteration. Multiple workers
+    #   may execute this op, but only after is_initialized becomes True.
+    (all_distances, model_predictions, losses, is_initialized, init_op,
+     training_op) = clustering_ops.KMeans(
+         inputs=input_points,
+         num_clusters=self._num_clusters,
+         initial_clusters=self._initial_clusters,
+         distance_metric=self._distance_metric,
+         use_mini_batch=self._use_mini_batch,
+         mini_batch_steps_per_iteration=self._mini_batch_steps_per_iteration,
+         random_seed=self._seed,
+         kmeans_plus_plus_num_retries=self._kmeans_plus_plus_num_retries
+     ).training_graph()
+
+    loss = tf.math.reduce_sum(losses)
+    tf.compat.v1.summary.scalar('loss/raw', loss)
+
+    incr_step = tf.compat.v1.assign_add(tf.compat.v1.train.get_global_step(), 1)
+    training_op = control_flow_ops.with_dependencies([training_op, incr_step],
+                                                     loss)
+
+    training_hooks = [
+        _InitializeClustersHook(init_op, is_initialized, config.is_chief)
+    ]
+    if self._relative_tolerance is not None:
+      training_hooks.append(
+          _LossRelativeChangeHook(loss, self._relative_tolerance))
+
+    export_outputs = {
+        KMeansClustering.ALL_DISTANCES:
+            export_output.PredictOutput(all_distances[0]),
+        KMeansClustering.CLUSTER_INDEX:
+            export_output.PredictOutput(model_predictions[0]),
+        tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
+            export_output.PredictOutput(model_predictions[0])
+    }
+
+    return model_fn_lib.EstimatorSpec(
+        mode=mode,
+        predictions={
+            KMeansClustering.ALL_DISTANCES: all_distances[0],
+            KMeansClustering.CLUSTER_INDEX: model_predictions[0],
+        },
+        loss=loss,
+        train_op=training_op,
+        eval_metric_ops={
+            KMeansClustering.SCORE: tf.compat.v1.metrics.mean(loss)
+        },
+        training_hooks=training_hooks,
+        export_outputs=export_outputs)
+
+
+# TODO(agarwal,ands): support sharded input.
+@estimator_export(v1=['estimator.experimental.KMeans'])
+class KMeansClustering(estimator.Estimator):
+  """An Estimator for K-Means clustering.
+
+  Example:
+  ```
+  import numpy as np
+  import tensorflow as tf
+
+  num_points = 100
+  dimensions = 2
+  points = np.random.uniform(0, 1000, [num_points, dimensions])
+
+  def input_fn():
+    return tf.compat.v1.train.limit_epochs(
+        tf.convert_to_tensor(points, dtype=tf.float32), num_epochs=1)
+
+  num_clusters = 5
+  kmeans = tf.compat.v1.estimator.experimental.KMeans(
+      num_clusters=num_clusters, use_mini_batch=False)
+
+  # train
+  num_iterations = 10
+  previous_centers = None
+  for _ in range(num_iterations):
+    kmeans.train(input_fn)
+    cluster_centers = kmeans.cluster_centers()
+    if previous_centers is not None:
+      print('delta:', cluster_centers - previous_centers)
+    previous_centers = cluster_centers
+    print('score:', kmeans.score(input_fn))
+  print('cluster centers:', cluster_centers)
+
+  # map the input points to their clusters
+  cluster_indices = list(kmeans.predict_cluster_index(input_fn))
+  for i, point in enumerate(points):
+    cluster_index = cluster_indices[i]
+    center = cluster_centers[cluster_index]
+    print('point:', point, 'is in cluster', cluster_index, 'centered at',
+          center)
+  ```
+
+  The `SavedModel` saved by the `export_saved_model` method does not include
+  the cluster centers. However, the cluster centers may be retrieved from the
+  latest checkpoint saved during training. Specifically,
+  ```
+  kmeans.cluster_centers()
+  ```
+  is equivalent to
+  ```
+  tf.train.load_variable(
+      kmeans.model_dir, KMeansClustering.CLUSTER_CENTERS_VAR_NAME)
+  ```
+  """
+
+  # Valid values for the distance_metric constructor argument.
+  SQUARED_EUCLIDEAN_DISTANCE = clustering_ops.SQUARED_EUCLIDEAN_DISTANCE
+  COSINE_DISTANCE = clustering_ops.COSINE_DISTANCE
+
+  # Values for initial_clusters constructor argument.
+  RANDOM_INIT = clustering_ops.RANDOM_INIT
+  KMEANS_PLUS_PLUS_INIT = clustering_ops.KMEANS_PLUS_PLUS_INIT
+
+  # Metric returned by evaluate(): The sum of the squared distances from each
+  # input point to its closest center.
+  SCORE = 'score'
+
+  # Keys returned by predict().
+  # ALL_DISTANCES: The distance from each input point to each cluster center.
+  # CLUSTER_INDEX: The index of the closest cluster center for each input point.
+  CLUSTER_INDEX = 'cluster_index'
+  ALL_DISTANCES = 'all_distances'
+
+  # Variable name used by cluster_centers().
+  CLUSTER_CENTERS_VAR_NAME = clustering_ops.CLUSTERS_VAR_NAME
+
+  def __init__(self,
+               num_clusters,
+               model_dir=None,
+               initial_clusters=RANDOM_INIT,
+               distance_metric=SQUARED_EUCLIDEAN_DISTANCE,
+               seed=None,
+               use_mini_batch=True,
+               mini_batch_steps_per_iteration=1,
+               kmeans_plus_plus_num_retries=2,
+               relative_tolerance=None,
+               config=None,
+               feature_columns=None):
+    r"""Creates an Estimator for running KMeans training and inference.
+
+    This Estimator implements the following variants of the K-means algorithm:
+
+    If `use_mini_batch` is False, it runs standard full batch K-means. Each
+    training step runs a single iteration of K-Means and must process the full
+    input at once. To run in this mode, the `input_fn` passed to `train` must
+    return the entire input dataset.
+
+    If `use_mini_batch` is True, it runs a generalization of the mini-batch
+    K-means algorithm. It runs multiple iterations, where each iteration is
+    composed of `mini_batch_steps_per_iteration` steps. Each training step
+    accumulates the contribution from one mini-batch into temporary storage.
+    Every `mini_batch_steps_per_iteration` steps, the cluster centers are
+    updated and the temporary storage cleared for the next iteration.
+    For example, suppose the entire dataset contains 64k examples and the
+    batch size is 64. The user can choose mini_batch_steps_per_iteration=100
+    so that 10% of the data is processed in each iteration before the cluster
+    centers are updated.
+    Note that:
+      * If `mini_batch_steps_per_iteration=1`, the algorithm reduces to the
+        standard K-means mini-batch algorithm.
+      * If `mini_batch_steps_per_iteration = num_inputs / batch_size`, the
+        algorithm becomes an asynchronous version of the full-batch algorithm.
+        However, there is no guarantee by this implementation that each input
+        is seen exactly once per iteration. Also, different updates are applied
+        asynchronously without locking. So this asynchronous version may not
+        behave exactly like a full-batch version.
+
+    Args:
+      num_clusters: An integer tensor specifying the number of clusters. This
+        argument is ignored if `initial_clusters` is a tensor or numpy array.
+      model_dir: The directory to save the model results and log files.
+      initial_clusters: Specifies how the initial cluster centers are chosen.
+        One of the following:
+        * a tensor or numpy array with the initial cluster centers.
+        * a callable `f(inputs, k)` that selects and returns up to `k` centers
+          from an input batch. `f` is free to return any number of centers
+          from `0` to `k`. It will be invoked on successive input batches as
+          necessary until all `num_clusters` centers are chosen.
+        * `KMeansClustering.RANDOM_INIT`: Choose centers randomly from an input
+          batch. If the batch size is less than `num_clusters` then the entire
+          batch is chosen to be the initial cluster centers and the remaining
+          centers are chosen from successive input batches.
+        * `KMeansClustering.KMEANS_PLUS_PLUS_INIT`: Use kmeans++ to choose
+          centers from the first input batch. If the batch size is less than
+          `num_clusters`, a TensorFlow runtime error occurs.
+      distance_metric: The distance metric used for clustering. One of:
+        * `KMeansClustering.SQUARED_EUCLIDEAN_DISTANCE`: The squared Euclidean
+          distance between vectors `u` and `v`, defined as \\(||u - v||_2^2\\),
+          i.e. the sum of the squares of the element-wise differences.
+        * `KMeansClustering.COSINE_DISTANCE`: Cosine distance between vectors
+          `u` and `v` is defined as \\(1 - (u . v) / (||u||_2 ||v||_2)\\).
+      seed: Python integer. Seed for PRNG used to initialize centers.
+      use_mini_batch: A boolean specifying whether to use the mini-batch k-means
+        algorithm. See explanation above.
+      mini_batch_steps_per_iteration: The number of steps after which the
+        updated cluster centers are synced back to a master copy. Used only if
+        `use_mini_batch=True`. See explanation above.
+      kmeans_plus_plus_num_retries: For each point that is sampled during
+        kmeans++ initialization, this parameter specifies the number of
+        additional points to draw from the current distribution before selecting
+        the best. If a negative value is specified, a heuristic is used to
+        sample `O(log(num_to_sample))` additional points. Used only if
+        `initial_clusters=KMeansClustering.KMEANS_PLUS_PLUS_INIT`.
+      relative_tolerance: A relative tolerance of change in the loss between
+        iterations. Stops learning if the loss changes less than this amount.
+        This may not work correctly if `use_mini_batch=True`.
+      config: See `tf.estimator.Estimator`.
+      feature_columns: An optional iterable containing all the feature columns
+        used by the model. All items in the set should be feature column
+        instances that can be passed to `tf.feature_column.input_layer`. If this
+        is None, all features will be used.
+
+    Raises:
+      ValueError: An invalid argument was passed to `initial_clusters` or
+        `distance_metric`.
+    """
+    if isinstance(initial_clusters, str) and initial_clusters not in [
+        KMeansClustering.RANDOM_INIT, KMeansClustering.KMEANS_PLUS_PLUS_INIT
+    ]:
+      raise ValueError("Unsupported initialization algorithm '%s'" %
+                       initial_clusters)
+    if distance_metric not in [
+        KMeansClustering.SQUARED_EUCLIDEAN_DISTANCE,
+        KMeansClustering.COSINE_DISTANCE
+    ]:
+      raise ValueError("Unsupported distance metric '%s'" % distance_metric)
+    self._distance_metric = distance_metric
+    super(KMeansClustering, self).__init__(
+        model_fn=_ModelFn(num_clusters, initial_clusters, distance_metric, seed,
+                          use_mini_batch, mini_batch_steps_per_iteration,
+                          kmeans_plus_plus_num_retries, relative_tolerance,
+                          feature_columns).model_fn,
+        model_dir=model_dir,
+        config=config)
+
+  def _predict_one_key(self, input_fn, predict_key):
+    for result in self.predict(input_fn=input_fn, predict_keys=[predict_key]):
+      yield result[predict_key]
+
+  def predict_cluster_index(self, input_fn):
+    """Finds the index of the closest cluster center to each input point.
+
+    Args:
+      input_fn: Input points. See `tf.estimator.Estimator.predict`.
+
+    Yields:
+      The index of the closest cluster center for each input point.
+    """
+    for index in self._predict_one_key(input_fn,
+                                       KMeansClustering.CLUSTER_INDEX):
+      yield index
+
+  def score(self, input_fn):
+    """Returns the sum of squared distances to nearest clusters.
+
+    Note that this function is different from the corresponding one in
+    sklearn, which returns the negative sum.
+
+    Args:
+      input_fn: Input points. See `tf.estimator.Estimator.evaluate`. Only one
+        batch is retrieved.
+
+    Returns:
+      The sum of the squared distance from each point in the first batch of
+      inputs to its nearest cluster center.
+    """
+    return self.evaluate(input_fn=input_fn, steps=1)[KMeansClustering.SCORE]
+
+  def transform(self, input_fn):
+    """Transforms each input point to its distances to all cluster centers.
+
+    Note that if `distance_metric=KMeansClustering.SQUARED_EUCLIDEAN_DISTANCE`,
+    the squared distances are converted to Euclidean distances before they are
+    yielded, so the result matches the corresponding sklearn function.
+
+    Args:
+      input_fn: Input points. See `tf.estimator.Estimator.predict`.
+
+    Yields:
+      The distances from each input point to each cluster center.
+    """
+    for distances in self._predict_one_key(input_fn,
+                                           KMeansClustering.ALL_DISTANCES):
+      if self._distance_metric == KMeansClustering.SQUARED_EUCLIDEAN_DISTANCE:
+        yield np.sqrt(distances)
+      else:
+        yield distances
+
+  def cluster_centers(self):
+    """Returns the cluster centers."""
+    return self.get_variable_value(KMeansClustering.CLUSTER_CENTERS_VAR_NAME)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear.py
new file mode 100644
index 00000000..ea482c2b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear.py
@@ -0,0 +1,1675 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Linear Estimators."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import math
+
+import six
+import tensorflow as tf
+from tensorflow.python.feature_column import feature_column
+from tensorflow.python.feature_column import feature_column_lib
+from tensorflow.python.feature_column import feature_column_v2 as fc_v2
+from tensorflow.python.framework import ops
+from tensorflow.python.keras.optimizer_v2 import ftrl as ftrl_v2
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.ops import resource_variable_ops
+from tensorflow.python.ops import variable_scope
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator.canned import head as head_lib
+from tensorflow_estimator.python.estimator.canned import optimizers
+from tensorflow_estimator.python.estimator.canned.linear_optimizer.python.utils import sdca_ops
+from tensorflow_estimator.python.estimator.head import binary_class_head
+from tensorflow_estimator.python.estimator.head import head_utils
+from tensorflow_estimator.python.estimator.head import regression_head
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+# The default learning rate of 0.2 is a historical artifact of the initial
+# implementation, but seems a reasonable choice.
+_LEARNING_RATE = 0.2
+
+
+@estimator_export('estimator.experimental.LinearSDCA')
+class LinearSDCA(object):
+  """Stochastic Dual Coordinate Ascent helper for linear estimators.
+
+  Objects of this class are intended to be provided as the optimizer argument
+  (though LinearSDCA objects do not implement the `tf.train.Optimizer`
+  interface) when creating `tf.estimator.LinearClassifier` or
+  `tf.estimator.LinearRegressor`.
+
+  SDCA can only be used with `LinearClassifier` and `LinearRegressor` under the
+  following conditions:
+
+    - Feature columns are of type V2.
+    - Multivalent categorical columns are not normalized. In other words, the
+      `sparse_combiner` argument in the estimator constructor should be "sum".
+    - For classification: binary label.
+    - For regression: one-dimensional label.
+
+  Example usage:
+
+  ```python
+  real_feature_column = numeric_column(...)
+  sparse_feature_column = categorical_column_with_hash_bucket(...)
+  linear_sdca = tf.estimator.experimental.LinearSDCA(
+      example_id_column='example_id',
+      num_loss_partitions=1,
+      num_table_shards=1,
+      symmetric_l2_regularization=2.0)
+  classifier = tf.estimator.LinearClassifier(
+      feature_columns=[real_feature_column, sparse_feature_column],
+      weight_column=...,
+      optimizer=linear_sdca)
+  classifier.train(input_fn_train, steps=50)
+  classifier.evaluate(input_fn=input_fn_eval)
+  ```
+
+  Here the expectation is that the `input_fn_*` functions passed to train and
+  evaluate return a pair (dict, label_tensor) where the dict has
+  `example_id_column` as a key whose value is a `Tensor` of shape
+  [batch_size] and dtype string.
+
+  `num_loss_partitions` defines sigma' in eq (11) of [3]. Convergence of the
+  (global) loss is guaranteed if `num_loss_partitions` is greater than or
+  equal to the product `(#concurrent train ops per worker) x (#workers)`.
+  Larger values for `num_loss_partitions` lead to slower convergence. The
+  recommended value for `num_loss_partitions` in `tf.estimator` (where
+  currently there is one process per worker) is the number of workers running
+  the train steps. It defaults to 1 (single machine).
+
+  `num_table_shards` defines the number of shards for the internal state
+  table, typically set to match the number of parameter servers for large
+  data sets.
+
+  The SDCA algorithm was originally introduced in [1] and it was followed by
+  the L1 proximal step [2], a distributed version [3] and adaptive sampling [4].
+  [1] www.jmlr.org/papers/volume14/shalev-shwartz13a/shalev-shwartz13a.pdf
+  [2] https://arxiv.org/pdf/1309.2375.pdf
+  [3] https://arxiv.org/pdf/1502.03508.pdf
+  [4] https://arxiv.org/pdf/1502.08053.pdf
+  Details specific to this implementation are provided in:
+  https://github.com/tensorflow/estimator/tree/master/tensorflow_estimator/python/estimator/canned/linear_optimizer/doc/sdca.ipynb
+  """
+
+  def __init__(self,
+               example_id_column,
+               num_loss_partitions=1,
+               num_table_shards=None,
+               symmetric_l1_regularization=0.0,
+               symmetric_l2_regularization=1.0,
+               adaptive=False):
+    """Construct a new SDCA optimizer for linear estimators.
+
+    Args:
+      example_id_column: The column name containing the example ids.
+      num_loss_partitions: Number of workers.
+      num_table_shards: Number of shards of the internal state table, typically
+        set to match the number of parameter servers.
+      symmetric_l1_regularization: A float value, must be greater than or equal
+        to zero.
+      symmetric_l2_regularization: A float value, must be greater than zero and
+        should typically be greater than 1.
+      adaptive: A boolean indicating whether to use adaptive sampling.
+    """
+
+    self._example_id_column = example_id_column
+    self._num_loss_partitions = num_loss_partitions
+    self._num_table_shards = num_table_shards
+    self._symmetric_l1_regularization = symmetric_l1_regularization
+    self._symmetric_l2_regularization = symmetric_l2_regularization
+    self._adaptive = adaptive
+
+  def _prune_and_unique_sparse_ids(self, id_weight_pair):
+    """Remove duplicate and negative ids in a sparse tendor."""
+
+    id_tensor = id_weight_pair.id_tensor
+    if id_weight_pair.weight_tensor is not None:
+      weight_tensor = id_weight_pair.weight_tensor.values
+    else:
+      weight_tensor = tf.ones([tf.compat.v1.shape(id_tensor.indices)[0]],
+                              tf.dtypes.float32)
+
+    example_ids = tf.reshape(id_tensor.indices[:, 0], [-1])
+    flat_ids = tf.cast(
+        tf.reshape(id_tensor.values, [-1]), dtype=tf.dtypes.int64)
+    # Prune invalid IDs (< 0) from the flat_ids, example_ids, and
+    # weight_tensor.  These can come from looking up an OOV entry in the
+    # vocabulary (default value being -1).
+    is_id_valid = tf.math.greater_equal(flat_ids, 0)
+    flat_ids = tf.compat.v1.boolean_mask(flat_ids, is_id_valid)
+    example_ids = tf.compat.v1.boolean_mask(example_ids, is_id_valid)
+    weight_tensor = tf.compat.v1.boolean_mask(weight_tensor, is_id_valid)
+
+    projection_length = tf.math.reduce_max(flat_ids) + 1
+    # Project ids based on example ids so that we can dedup ids that
+    # occur multiple times for a single example.
+    projected_ids = projection_length * example_ids + flat_ids
+
+    # Remove any redundant ids.
+    ids, idx = tf.unique(projected_ids)
+    # Keep only one example id per duplicated ids.
+    example_ids_filtered = tf.math.unsorted_segment_min(
+        example_ids, idx,
+        tf.compat.v1.shape(ids)[0])
+
+    # Reproject ids back to the feature id space.
+    reproject_ids = (ids - projection_length * example_ids_filtered)
+
+    weights = tf.reshape(
+        tf.math.unsorted_segment_sum(weight_tensor, idx,
+                                     tf.compat.v1.shape(ids)[0]), [-1])
+    return sdca_ops._SparseFeatureColumn(  # pylint: disable=protected-access
+        example_ids_filtered, reproject_ids, weights)
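+
+  # Illustrative example (assumed values, for exposition only): if example 0
+  # contains feature ids [3, 3, 5] with weights [1., 2., 1.], the duplicate
+  # id 3 is merged and its weights are summed, yielding ids [3, 5] with
+  # weights [3., 1.] for example 0.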
+
+  def get_train_step(self, state_manager, weight_column_name, loss_type,
+                     feature_columns, features, targets, bias_var, global_step):
+    """Returns the training operation of an SdcaModel optimizer."""
+
+    batch_size = tf.compat.v1.shape(targets)[0]
+    cache = feature_column_lib.FeatureTransformationCache(features)
+
+    # Iterate over all feature columns and create appropriate lists for dense
+    # and sparse features as well as dense and sparse weights (variables) for
+    # SDCA.
+    dense_features, dense_feature_weights = [], []
+    sparse_feature_with_values, sparse_feature_with_values_weights = [], []
+    for column in sorted(feature_columns, key=lambda x: x.name):
+      if isinstance(column, feature_column_lib.CategoricalColumn):
+        id_weight_pair = column.get_sparse_tensors(cache, state_manager)
+        sparse_feature_with_values.append(
+            self._prune_and_unique_sparse_ids(id_weight_pair))
+        # If a partitioner was used during variable creation, we will have a
+        # list containing more than one Variable here.
+        sparse_feature_with_values_weights.append(
+            state_manager.get_variable(column, 'weights'))
+      elif isinstance(column, feature_column_lib.DenseColumn):
+        if column.variable_shape.ndims != 1:
+          raise ValueError('Column %s has rank %d, larger than 1.' %
+                           (type(column).__name__, column.variable_shape.ndims))
+        dense_features.append(column.get_dense_tensor(cache, state_manager))
+        # For real valued columns, the variables list contains exactly one
+        # element.
+        dense_feature_weights.append(
+            state_manager.get_variable(column, 'weights'))
+      else:
+        raise ValueError('LinearSDCA does not support column type %s.' %
+                         type(column).__name__)
+
+    # Add the bias column
+    dense_features.append(tf.ones([batch_size, 1]))
+    dense_feature_weights.append(bias_var)
+
+    example_weights = tf.reshape(
+        features[weight_column_name],
+        shape=[-1]) if weight_column_name else tf.ones([batch_size])
+    example_ids = features[self._example_id_column]
+    training_examples = dict(
+        sparse_features=sparse_feature_with_values,
+        dense_features=dense_features,
+        example_labels=tf.compat.v1.to_float(tf.reshape(targets, shape=[-1])),
+        example_weights=example_weights,
+        example_ids=example_ids)
+    training_variables = dict(
+        sparse_features_weights=sparse_feature_with_values_weights,
+        dense_features_weights=dense_feature_weights)
+    sdca_model = sdca_ops._SDCAModel(  # pylint: disable=protected-access
+        examples=training_examples,
+        variables=training_variables,
+        options=dict(
+            symmetric_l1_regularization=self._symmetric_l1_regularization,
+            symmetric_l2_regularization=self._symmetric_l2_regularization,
+            adaptive=self._adaptive,
+            num_loss_partitions=self._num_loss_partitions,
+            num_table_shards=self._num_table_shards,
+            loss_type=loss_type))
+    train_op = sdca_model.minimize(global_step=global_step)
+    return sdca_model, train_op
+
+
+def _get_default_optimizer_v2(feature_columns):
+  learning_rate = min(_LEARNING_RATE, 1.0 / math.sqrt(len(feature_columns)))
+  return ftrl_v2.Ftrl(learning_rate=learning_rate)
+
+
+def _get_default_optimizer(feature_columns):
+  learning_rate = min(_LEARNING_RATE, 1.0 / math.sqrt(len(feature_columns)))
+  return tf.compat.v1.train.FtrlOptimizer(learning_rate=learning_rate)
+
+
+def _get_expanded_variable_list(var_list):
+  """Given an iterable of variables, expands them if they are partitioned.
+
+  Args:
+    var_list: An iterable of variables.
+
+  Returns:
+    A list of variables where each partitioned variable is expanded to its
+    components.
+  """
+  returned_list = []
+  for variable in var_list:
+    if (isinstance(variable, tf.Variable) or
+        resource_variable_ops.is_resource_variable(variable) or
+        isinstance(variable, tf.Tensor)):
+      returned_list.append(variable)  # Single variable/tensor case.
+    else:  # Must be a PartitionedVariable, so convert into a list.
+      returned_list.extend(list(variable))
+  return returned_list
+
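+# Illustrative example (assumed setup, for exposition only): plain variables
+# and tensors pass through unchanged, while a `PartitionedVariable` with two
+# shards expands to its components:
+#
+#   _get_expanded_variable_list([v, partitioned_v])
+#   # -> [v, partitioned_v_part_0, partitioned_v_part_1]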
+
+# TODO(rohanj): Consider making this a public utility method.
+def _compute_fraction_of_zero(variables):
+  """Given a linear variables list, compute the fraction of zero weights.
+
+  Args:
+    variables: A list or list of list of variables
+
+  Returns:
+    The fraction of zeros (sparsity) in the linear model.
+  """
+  with ops.name_scope('zero_fraction'):
+    variables = tf.nest.flatten(variables)
+
+    with ops.name_scope('total_size'):
+      sizes = [
+          tf.compat.v1.size(x, out_type=tf.dtypes.int64) for x in variables
+      ]
+      total_size_int64 = tf.math.add_n(sizes)
+    with ops.name_scope('total_zero'):
+      total_zero_float32 = tf.math.add_n([
+          tf.compat.v1.cond(
+              tf.math.equal(size, tf.constant(0, dtype=tf.dtypes.int64)),
+              true_fn=lambda: tf.constant(0, dtype=tf.dtypes.float32),
+              false_fn=lambda: tf.math.zero_fraction(x) * tf.cast(
+                  size, dtype=tf.dtypes.float32),
+              name='zero_count') for x, size in zip(variables, sizes)
+      ])
+
+    with ops.name_scope('compute'):
+      total_size_float32 = tf.cast(
+          total_size_int64, dtype=tf.dtypes.float32, name='float32_size')
+      zero_fraction_or_nan = total_zero_float32 / total_size_float32
+
+    zero_fraction_or_nan = tf.identity(
+        zero_fraction_or_nan, name='zero_fraction_or_nan')
+    return zero_fraction_or_nan
+
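+# Illustrative example (assumed values, for exposition only): for variables
+# holding [0., 1.] and [0., 0.], the total size is 4 and the zero count is 3,
+# so the computed sparsity is 0.75. If every variable has size 0, the result
+# is NaN, hence the `zero_fraction_or_nan` name.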
+
+def linear_logit_fn_builder_v2(units, feature_columns, sparse_combiner='sum'):
+  """Function builder for a linear logit_fn.
+
+  Args:
+    units: An int indicating the dimension of the logit layer.
+    feature_columns: An iterable containing all the feature columns used by the
+      model.
+    sparse_combiner: A string specifying how to reduce if a categorical column
+      is multivalent.  One of "mean", "sqrtn", and "sum".
+
+  Returns:
+    A logit_fn (see below).
+
+  """
+
+  def linear_logit_fn(features):
+    """Linear model logit_fn.
+
+    Args:
+      features: This is the first item returned from the `input_fn` passed to
+        `train`, `evaluate`, and `predict`. This should be a single `Tensor` or
+        `dict` of same.
+
+    Returns:
+      A `Tensor` representing the logits.
+    """
+    if not feature_column_lib.is_feature_column_v2(feature_columns):
+      raise ValueError(
+          'Received a feature column from TensorFlow v1, but this is a '
+          'TensorFlow v2 Estimator. Please either use v2 feature columns '
+          '(accessible via tf.feature_column.* in TF 2.x) with this '
+          'Estimator, or switch to a v1 Estimator for use with v1 feature '
+          'columns (accessible via tf.compat.v1.estimator.* and '
+          'tf.compat.v1.feature_column.*, respectively).')
+
+    linear_model = LinearModel(
+        feature_columns=feature_columns,
+        units=units,
+        sparse_combiner=sparse_combiner,
+        name='linear_model')
+    logits = linear_model(features)
+    bias = linear_model.bias
+
+    # We'd like to get all the non-bias variables associated with this
+    # LinearModel.
+    # TODO(rohanj): Figure out how to get shared embedding weights variable
+    # here.
+    variables = linear_model.variables
+    variables.remove(bias)
+
+    # Expand (potential) Partitioned variables
+    bias = _get_expanded_variable_list([bias])
+    variables = _get_expanded_variable_list(variables)
+
+    if units > 1:
+      tf.compat.v1.summary.histogram('bias', bias)
+    else:
+      # If units == 1, the bias value is a length-1 list of a scalar Tensor,
+      # so we should provide a scalar summary.
+      tf.compat.v1.summary.scalar('bias', bias[0][0])
+    tf.compat.v1.summary.scalar('fraction_of_zero_weights',
+                                _compute_fraction_of_zero(variables))
+    return logits
+
+  return linear_logit_fn
+
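+# Illustrative usage (assumed setup, for exposition only):
+#
+#   columns = [tf.feature_column.numeric_column('x', shape=(2,))]
+#   logit_fn = linear_logit_fn_builder_v2(units=1, feature_columns=columns)
+#   logits = logit_fn({'x': tf.constant([[1., 2.]])})  # shape [1, 1]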
+
+@estimator_export(v1=['estimator.experimental.linear_logit_fn_builder'])
+def linear_logit_fn_builder(units, feature_columns, sparse_combiner='sum'):
+  """Function builder for a linear logit_fn.
+
+  Args:
+    units: An int indicating the dimension of the logit layer.
+    feature_columns: An iterable containing all the feature columns used by the
+      model.
+    sparse_combiner: A string specifying how to reduce if a categorical column
+      is multivalent.  One of "mean", "sqrtn", and "sum".
+
+  Returns:
+    A logit_fn (see below).
+
+  """
+
+  def linear_logit_fn(features):
+    """Linear model logit_fn.
+
+    Args:
+      features: This is the first item returned from the `input_fn` passed to
+        `train`, `evaluate`, and `predict`. This should be a single `Tensor` or
+        `dict` of same.
+
+    Returns:
+      A `Tensor` representing the logits.
+    """
+    if feature_column_lib.is_feature_column_v2(feature_columns):
+      linear_model = LinearModel(
+          feature_columns=feature_columns,
+          units=units,
+          sparse_combiner=sparse_combiner,
+          name='linear_model')
+      logits = linear_model(features)
+
+      # We'd like to get all the non-bias variables associated with this
+      # LinearModel.
+      # TODO(rohanj): Figure out how to get shared embedding weights variable
+      # here.
+      bias = linear_model.bias
+      variables = linear_model.variables
+      # Expand (potential) Partitioned variables
+      bias = _get_expanded_variable_list([bias])
+      variables = _get_expanded_variable_list(variables)
+      variables = [var for var in variables if var not in bias]
+    else:
+      linear_model = feature_column._LinearModel(  # pylint: disable=protected-access
+          feature_columns=feature_columns,
+          units=units,
+          sparse_combiner=sparse_combiner,
+          name='linear_model')
+      logits = linear_model(features)
+      cols_to_vars = linear_model.cols_to_vars()
+      bias = cols_to_vars.pop('bias')
+      variables = cols_to_vars.values()
+      variables = _get_expanded_variable_list(variables)
+
+    if units > 1:
+      tf.compat.v1.summary.histogram('bias', bias)
+    else:
+      # If units == 1, the bias value is a length-1 list of a scalar Tensor,
+      # so we should provide a scalar summary.
+      tf.compat.v1.summary.scalar('bias', bias[0][0])
+    tf.compat.v1.summary.scalar('fraction_of_zero_weights',
+                                _compute_fraction_of_zero(variables))
+    return logits
+
+  return linear_logit_fn
+
+
+def _sdca_model_fn(features, labels, mode, head, feature_columns, optimizer):
+  """A model_fn for linear models that use the SDCA optimizer.
+
+  Args:
+    features: dict of `Tensor`.
+    labels: `Tensor` of shape `[batch_size]`.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `Head` instance.
+    feature_columns: An iterable containing all the feature columns used by the
+      model.
+    optimizer: a `LinearSDCA` instance.
+
+  Returns:
+    An `EstimatorSpec` instance.
+
+  Raises:
+    ValueError: mode or params are invalid, or features has the wrong type.
+  """
+  assert feature_column_lib.is_feature_column_v2(feature_columns)
+  if isinstance(head,
+                (binary_class_head.BinaryClassHead,
+                 head_lib._BinaryLogisticHeadWithSigmoidCrossEntropyLoss)):  # pylint: disable=protected-access
+    loss_type = 'logistic_loss'
+  elif isinstance(head, (regression_head.RegressionHead,
+                         head_lib._RegressionHeadWithMeanSquaredErrorLoss)):  # pylint: disable=protected-access
+    assert head.logits_dimension == 1
+    loss_type = 'squared_loss'
+  else:
+    raise ValueError('Unsupported head type: {}'.format(head))
+
+  # The default name for LinearModel.
+  linear_model_name = 'linear_model'
+
+  # Name scope has no effect on variables in LinearModel, as it uses
+  # tf.compat.v1.get_variable() for variable creation. So we modify the model
+  # name to keep the variable names the same for checkpoint backward
+  # compatibility in canned Linear v2.
+  if isinstance(
+      head,
+      (binary_class_head.BinaryClassHead, regression_head.RegressionHead)):
+    linear_model_name = 'linear/linear_model'
+
+  linear_model = LinearModel(
+      feature_columns=feature_columns,
+      units=1,
+      sparse_combiner='sum',
+      name=linear_model_name)
+  logits = linear_model(features)
+
+  # We'd like to get all the non-bias variables associated with this
+  # LinearModel.
+  # TODO(rohanj): Figure out how to get shared embedding weights variable
+  # here.
+  bias = linear_model.bias
+  variables = linear_model.variables
+  # Expand (potential) Partitioned variables
+  bias = _get_expanded_variable_list([bias])
+  variables = _get_expanded_variable_list(variables)
+  variables = [var for var in variables if var not in bias]
+
+  tf.compat.v1.summary.scalar('bias', bias[0][0])
+  tf.compat.v1.summary.scalar('fraction_of_zero_weights',
+                              _compute_fraction_of_zero(variables))
+
+  if mode == ModeKeys.TRAIN:
+    sdca_model, train_op = optimizer.get_train_step(
+        linear_model.layer._state_manager,  # pylint: disable=protected-access
+        head._weight_column,  # pylint: disable=protected-access
+        loss_type,
+        feature_columns,
+        features,
+        labels,
+        linear_model.bias,
+        tf.compat.v1.train.get_global_step())
+
+    update_weights_hook = _SDCAUpdateWeightsHook(sdca_model, train_op)
+
+    model_fn_ops = head.create_estimator_spec(
+        features=features,
+        mode=mode,
+        labels=labels,
+        train_op_fn=lambda unused_loss_fn: train_op,
+        logits=logits)
+    return model_fn_ops._replace(
+        training_chief_hooks=(model_fn_ops.training_chief_hooks +
+                              (update_weights_hook,)))
+  else:
+    return head.create_estimator_spec(
+        features=features, mode=mode, labels=labels, logits=logits)
+
+
+class _SDCAUpdateWeightsHook(tf.compat.v1.train.SessionRunHook):
+  """SessionRunHook to update and shrink SDCA model weights."""
+
+  def __init__(self, sdca_model, train_op):
+    self._sdca_model = sdca_model
+    self._train_op = train_op
+
+  def begin(self):
+    """Construct the update_weights op.
+
+    The op is implicitly added to the default graph.
+    """
+    self._update_op = self._sdca_model.update_weights(self._train_op)
+
+  def before_run(self, run_context):
+    """Return the update_weights op so that it is executed during this run."""
+    return tf.compat.v1.train.SessionRunArgs(self._update_op)
+
+
+def _linear_model_fn_builder_v2(units,
+                                feature_columns,
+                                sparse_combiner='sum',
+                                features=None):
+  """Function builder for a linear model_fn.
+
+  Args:
+    units: An int indicating the dimension of the logit layer.
+    feature_columns: An iterable containing all the feature columns used by the
+      model.
+    sparse_combiner: A string specifying how to reduce if a categorical column
+      is multivalent.  One of "mean", "sqrtn", and "sum".
+    features: This is the first item returned from the `input_fn` passed to
+      `train`, `evaluate`, and `predict`. This should be a single `Tensor` or
+      `dict` of same.
+
+  Returns:
+    A tuple of a `Tensor` representing the logits and a list of the model's
+    trainable variables.
+  """
+  if not feature_column_lib.is_feature_column_v2(feature_columns):
+    raise ValueError(
+        'Received a feature column from TensorFlow v1, but this is a '
+        'TensorFlow v2 Estimator. Please either use v2 feature columns '
+        '(accessible via tf.feature_column.* in TF 2.x) with this '
+        'Estimator, or switch to a v1 Estimator for use with v1 feature '
+        'columns (accessible via tf.compat.v1.estimator.* and '
+        'tf.compat.v1.feature_column.*, respectively).')
+
+  # Name scope has no effect on variables in LinearModel, as it uses
+  # tf.compat.v1.get_variable() for variable creation. So we modify the model
+  # name to keep the variable names the same for checkpoint backward
+  # compatibility.
+  linear_model = LinearModel(
+      feature_columns=feature_columns,
+      units=units,
+      sparse_combiner=sparse_combiner,
+      name='linear/linear_model')
+  logits = linear_model(features)
+  bias = linear_model.bias
+
+  # We'd like to get all the non-bias variables associated with this
+  # LinearModel.
+  # TODO(rohanj): Figure out how to get shared embedding weights variable
+  # here.
+  variables = linear_model.variables
+  variables.remove(bias)
+
+  if units > 1:
+    tf.compat.v1.summary.histogram('bias', bias)
+  else:
+    # If units == 1, the bias is a length-1 vector, so index it to provide a
+    # scalar summary.
+    tf.compat.v1.summary.scalar('bias', bias[0])
+  tf.compat.v1.summary.scalar('fraction_of_zero_weights',
+                              _compute_fraction_of_zero(variables))
+
+  return logits, linear_model.variables
+
+
+def _linear_model_fn_v2(features,
+                        labels,
+                        mode,
+                        head,
+                        feature_columns,
+                        optimizer,
+                        config,
+                        sparse_combiner='sum'):
+  """A model_fn for linear models that use a gradient-based optimizer.
+
+  Args:
+    features: dict of `Tensor`.
+    labels: `Tensor` of shape `[batch_size, logits_dimension]`.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `Head` instance.
+    feature_columns: An iterable containing all the feature columns used by the
+      model.
+    optimizer: string, `Optimizer` object, or callable that defines the
+      optimizer to use for training. If `None`, will use a FTRL optimizer.
+    config: `RunConfig` object to configure the runtime settings.
+    sparse_combiner: A string specifying how to reduce if a categorical column
+      is multivalent.  One of "mean", "sqrtn", and "sum".
+
+  Returns:
+    An `EstimatorSpec` instance.
+
+  Raises:
+    ValueError: mode or params are invalid, or features has the wrong type.
+  """
+  if not isinstance(features, dict):
+    raise ValueError('features should be a dictionary of `Tensor`s. '
+                     'Given type: {}'.format(type(features)))
+
+  del config
+
+  if isinstance(optimizer, LinearSDCA):
+    assert sparse_combiner == 'sum'
+    return _sdca_model_fn(features, labels, mode, head, feature_columns,
+                          optimizer)
+  else:
+    logits, trainable_variables = _linear_model_fn_builder_v2(
+        units=head.logits_dimension,
+        feature_columns=feature_columns,
+        sparse_combiner=sparse_combiner,
+        features=features)
+
+    # In TRAIN mode, create the optimizer and assign the global_step variable
+    # to optimizer.iterations so that global_step is incremented correctly,
+    # as Hooks rely on the global step as a step counter.
+    if mode == ModeKeys.TRAIN:
+      optimizer = optimizers.get_optimizer_instance_v2(
+          optimizer or _get_default_optimizer_v2(feature_columns),
+          learning_rate=_LEARNING_RATE)
+      optimizer.iterations = tf.compat.v1.train.get_or_create_global_step()
+
+    return head.create_estimator_spec(
+        features=features,
+        mode=mode,
+        labels=labels,
+        optimizer=optimizer,
+        trainable_variables=trainable_variables,
+        logits=logits)
+
+
+def _linear_model_fn(features,
+                     labels,
+                     mode,
+                     head,
+                     feature_columns,
+                     optimizer,
+                     partitioner,
+                     config,
+                     sparse_combiner='sum'):
+  """A model_fn for linear models that use a gradient-based optimizer.
+
+  Args:
+    features: dict of `Tensor`.
+    labels: `Tensor` of shape `[batch_size, logits_dimension]`.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `Head` instance.
+    feature_columns: An iterable containing all the feature columns used by the
+      model.
+    optimizer: string, `Optimizer` object, or callable that defines the
+      optimizer to use for training. If `None`, will use a FTRL optimizer.
+    partitioner: Partitioner for variables.
+    config: `RunConfig` object to configure the runtime settings.
+    sparse_combiner: A string specifying how to reduce if a categorical column
+      is multivalent.  One of "mean", "sqrtn", and "sum".
+
+  Returns:
+    An `EstimatorSpec` instance.
+
+  Raises:
+    ValueError: mode or params are invalid, or features has the wrong type.
+  """
+  if not isinstance(features, dict):
+    raise ValueError('features should be a dictionary of `Tensor`s. '
+                     'Given type: {}'.format(type(features)))
+
+  num_ps_replicas = config.num_ps_replicas if config else 0
+
+  partitioner = partitioner or (tf.compat.v1.min_max_variable_partitioner(
+      max_partitions=num_ps_replicas, min_slice_size=64 << 20))
+
+  with tf.compat.v1.variable_scope(
+      'linear', values=tuple(six.itervalues(features)),
+      partitioner=partitioner):
+
+    if isinstance(optimizer, LinearSDCA):
+      assert sparse_combiner == 'sum'
+      return _sdca_model_fn(features, labels, mode, head, feature_columns,
+                            optimizer)
+    else:
+      logit_fn = linear_logit_fn_builder(
+          units=head.logits_dimension,
+          feature_columns=feature_columns,
+          sparse_combiner=sparse_combiner,
+      )
+      logits = logit_fn(features=features)
+
+      optimizer = optimizers.get_optimizer_instance(
+          optimizer or _get_default_optimizer(feature_columns),
+          learning_rate=_LEARNING_RATE)
+
+      return head.create_estimator_spec(
+          features=features,
+          mode=mode,
+          labels=labels,
+          optimizer=optimizer,
+          logits=logits)
+
+
+def _validate_linear_sdca_optimizer_for_linear_classifier(
+    feature_columns, n_classes, optimizer, sparse_combiner):
+  """Helper function for the initialization of LinearClassifier."""
+  if isinstance(optimizer, LinearSDCA):
+    if sparse_combiner != 'sum':
+      raise ValueError('sparse_combiner must be "sum" when optimizer '
+                       'is a LinearSDCA object.')
+    if not feature_column_lib.is_feature_column_v2(feature_columns):
+      raise ValueError('V2 feature columns required when optimizer '
+                       'is a LinearSDCA object.')
+    if n_classes > 2:
+      raise ValueError('LinearSDCA cannot be used in a multi-class setting.')
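+
+# Illustrative sketch of a configuration that passes the validation above,
+# assuming `tf.estimator.experimental.LinearSDCA` takes an `example_id_column`
+# as its first argument; `v2_columns` and 'example_id' are hypothetical.
+#
+#   sdca = tf.estimator.experimental.LinearSDCA(
+#       example_id_column='example_id',
+#       symmetric_l2_regularization=1.0)
+#   clf = tf.estimator.LinearClassifier(
+#       feature_columns=v2_columns,   # must be V2 feature columns
+#       n_classes=2,                  # LinearSDCA is binary-only
+#       optimizer=sdca,
+#       sparse_combiner='sum')        # required when optimizer is LinearSDCA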
+
+
+@estimator_export('estimator.LinearClassifier', v1=[])
+class LinearClassifierV2(estimator.EstimatorV2):
+  """Linear classifier model.
+
+  Train a linear model to classify instances into one of multiple possible
+  classes. When the number of possible classes is 2, this is binary
+  classification.
+
+  Example:
+
+  ```python
+  categorical_column_a = categorical_column_with_hash_bucket(...)
+  categorical_column_b = categorical_column_with_hash_bucket(...)
+
+  categorical_feature_a_x_categorical_feature_b = crossed_column(...)
+
+  # Estimator using the default optimizer.
+  estimator = tf.estimator.LinearClassifier(
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b])
+
+  # Or estimator using the FTRL optimizer with regularization.
+  estimator = tf.estimator.LinearClassifier(
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b],
+      optimizer=tf.keras.optimizers.Ftrl(
+        learning_rate=0.1,
+        l1_regularization_strength=0.001
+      ))
+
+  # Or estimator using an optimizer with a learning rate decay.
+  estimator = tf.estimator.LinearClassifier(
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b],
+      optimizer=lambda: tf.keras.optimizers.Ftrl(
+          learning_rate=tf.compat.v1.train.exponential_decay(
+              learning_rate=0.1,
+              global_step=tf.compat.v1.train.get_global_step(),
+              decay_steps=10000,
+              decay_rate=0.96)))
+
+  # Or estimator with warm-starting from a previous checkpoint.
+  estimator = tf.estimator.LinearClassifier(
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b],
+      warm_start_from="/path/to/checkpoint/dir")
+
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train)
+  metrics = estimator.evaluate(input_fn=input_fn_eval)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+    otherwise there will be a `KeyError`:
+
+  * if `weight_column` is not `None`, a feature with `key=weight_column` whose
+    value is a `Tensor`.
+  * for each `column` in `feature_columns`:
+    - if `column` is a `SparseColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedSparseColumn`, two features: the first with
+      `key` the id column name, the second with `key` the weight column name.
+      Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `RealValuedColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
+  Loss is calculated by using softmax cross entropy.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               feature_columns,
+               model_dir=None,
+               n_classes=2,
+               weight_column=None,
+               label_vocabulary=None,
+               optimizer='Ftrl',
+               config=None,
+               warm_start_from=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               sparse_combiner='sum'):
+    """Construct a `LinearClassifier` estimator object.
+
+    Args:
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `FeatureColumn`.
+      model_dir: Directory to save model parameters, graph, etc. This can also
+        be used to load checkpoints from the directory into an estimator to
+        continue training a previously saved model.
+      n_classes: number of label classes. Default is binary classification. Note
+        that class labels are integers representing the class index (i.e. values
+        from 0 to n_classes-1). For arbitrary label values (e.g. string labels),
+        convert to class indices first.
+      weight_column: A string or a `_NumericColumn` created by
+        `tf.feature_column.numeric_column` defining the feature column
+        representing weights. It is used to down-weight or boost examples
+        during training, and is multiplied by the loss of the example. If it is
+        a string, it is used as a key to fetch the weight tensor from the
+        `features`. If it is a `_NumericColumn`, the raw tensor is fetched by
+        key `weight_column.key`, then `weight_column.normalizer_fn` is applied
+        on it to get the weight tensor.
+      label_vocabulary: A list of strings representing possible label values.
+        If given, labels must be of string type and have a value in
+        `label_vocabulary`. If it is not given, labels are assumed to be
+        already encoded as integers or floats within [0, 1] for `n_classes=2`,
+        or as integer values in {0, 1, ..., n_classes-1} for `n_classes > 2`.
+        An error is raised if a vocabulary is not provided and labels are
+        strings.
+      optimizer: An instance of `tf.keras.optimizers.*` or
+        `tf.estimator.experimental.LinearSDCA` used to train the model. Can also
+        be a string (one of 'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or
+        callable. Defaults to FTRL optimizer.
+      config: `RunConfig` object to configure the runtime settings.
+      warm_start_from: A string filepath to a checkpoint to warm-start from, or
+        a `WarmStartSettings` object to fully configure warm-starting.  If the
+        string filepath is provided instead of a `WarmStartSettings`, then all
+        weights and biases are warm-started, and it is assumed that vocabularies
+        and Tensor names are unchanged.
+      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+        to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+      sparse_combiner: A string specifying how to reduce if a categorical column
+        is multivalent.  One of "mean", "sqrtn", and "sum" -- these are
+        effectively different ways to do example-level normalization, which can
+        be useful for bag-of-words features. For more details, see
+        `tf.feature_column.linear_model`.
+
+    Returns:
+      A `LinearClassifier` estimator.
+
+    Raises:
+      ValueError: if n_classes < 2.
+    """
+    _validate_linear_sdca_optimizer_for_linear_classifier(
+        feature_columns=feature_columns,
+        n_classes=n_classes,
+        optimizer=optimizer,
+        sparse_combiner=sparse_combiner)
+    estimator._canned_estimator_api_gauge.get_cell('Classifier').set('Linear')  # pylint: disable=protected-access
+
+    head = head_utils.binary_or_multi_class_head(
+        n_classes,
+        weight_column=weight_column,
+        label_vocabulary=label_vocabulary,
+        loss_reduction=loss_reduction)
+
+    def _model_fn(features, labels, mode, config):
+      """Call the defined shared _linear_model_fn."""
+      return _linear_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          config=config,
+          sparse_combiner=sparse_combiner)
+
+    super(LinearClassifierV2, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
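+
+# Illustrative sketch (assumed usage): constructing the classifier with string
+# labels via `label_vocabulary`, which the class docstring examples do not
+# show. The column name and data are hypothetical.
+#
+#   body_style = tf.feature_column.categorical_column_with_vocabulary_list(
+#       'body-style', vocabulary_list=['hardtop', 'wagon', 'sedan'])
+#   clf = LinearClassifierV2(
+#       feature_columns=[body_style],
+#       n_classes=2,
+#       label_vocabulary=['cheap', 'expensive'])
+#
+#   def train_input_fn():
+#     features = {'body-style': tf.constant([['sedan'], ['wagon']])}
+#     labels = tf.constant([['cheap'], ['expensive']])
+#     return tf.data.Dataset.from_tensors((features, labels)).repeat()
+#
+#   clf.train(input_fn=train_input_fn, steps=10)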
+
+
+@estimator_export(v1=['estimator.LinearClassifier'])  # pylint: disable=missing-docstring
+class LinearClassifier(estimator.Estimator):
+  __doc__ = LinearClassifierV2.__doc__.replace('SUM_OVER_BATCH_SIZE', 'SUM')
+
+  def __init__(self,
+               feature_columns,
+               model_dir=None,
+               n_classes=2,
+               weight_column=None,
+               label_vocabulary=None,
+               optimizer='Ftrl',
+               config=None,
+               partitioner=None,
+               warm_start_from=None,
+               loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+               sparse_combiner='sum'):
+    _validate_linear_sdca_optimizer_for_linear_classifier(
+        feature_columns=feature_columns,
+        n_classes=n_classes,
+        optimizer=optimizer,
+        sparse_combiner=sparse_combiner)
+    estimator._canned_estimator_api_gauge.get_cell('Classifier').set('Linear')  # pylint: disable=protected-access
+
+    head = head_lib._binary_logistic_or_multi_class_head(  # pylint: disable=protected-access
+        n_classes, weight_column, label_vocabulary, loss_reduction)
+
+    def _model_fn(features, labels, mode, config):
+      """Call the defined shared _linear_model_fn."""
+      return _linear_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          partitioner=partitioner,
+          config=config,
+          sparse_combiner=sparse_combiner)
+
+    super(LinearClassifier, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
+
+
+@estimator_export('estimator.LinearEstimator', v1=[])
+class LinearEstimatorV2(estimator.EstimatorV2):
+  """An estimator for TensorFlow linear models with user-specified head.
+
+  Example:
+
+  ```python
+  categorical_column_a = categorical_column_with_hash_bucket(...)
+  categorical_column_b = categorical_column_with_hash_bucket(...)
+
+  categorical_feature_a_x_categorical_feature_b = crossed_column(...)
+
+  # Estimator using the default optimizer.
+  estimator = tf.estimator.LinearEstimator(
+      head=tf.estimator.MultiLabelHead(n_classes=3),
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b])
+
+  # Or estimator using an optimizer with a learning rate decay.
+  estimator = tf.estimator.LinearEstimator(
+      head=tf.estimator.MultiLabelHead(n_classes=3),
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b],
+      optimizer=lambda: tf.keras.optimizers.Ftrl(
+          learning_rate=tf.compat.v1.train.exponential_decay(
+              learning_rate=0.1,
+              global_step=tf.compat.v1.train.get_global_step(),
+              decay_steps=10000,
+              decay_rate=0.96)))
+
+  # Or estimator using the FTRL optimizer with regularization.
+  estimator = tf.estimator.LinearEstimator(
+      head=tf.estimator.MultiLabelHead(n_classes=3),
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b],
+      optimizer=tf.keras.optimizers.Ftrl(
+          learning_rate=0.1,
+          l1_regularization_strength=0.001
+      ))
+
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train, steps=100)
+  metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError`:
+
+  * if `weight_column` is not `None`, a feature with `key=weight_column` whose
+    value is a `Tensor`.
+  * for each `column` in `feature_columns`:
+    - if `column` is a `CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
+  Loss and predicted output are determined by the specified head.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               head,
+               feature_columns,
+               model_dir=None,
+               optimizer='Ftrl',
+               config=None,
+               sparse_combiner='sum',
+               warm_start_from=None):
+    """Initializes a `LinearEstimator` instance.
+
+    Args:
+      head: A `Head` instance constructed with a method such as
+        `tf.estimator.MultiLabelHead`.
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `FeatureColumn`.
+      model_dir: Directory to save model parameters, graph, etc. This can also
+        be used to load checkpoints from the directory into an estimator to
+        continue training a previously saved model.
+      optimizer: An instance of `tf.keras.optimizers.*` used to train the model.
+        Can also be a string (one of 'Adagrad', 'Adam', 'Ftrl', 'RMSProp',
+        'SGD'), or callable. Defaults to FTRL optimizer.
+      config: `RunConfig` object to configure the runtime settings.
+      sparse_combiner: A string specifying how to reduce if a categorical column
+        is multivalent.  One of "mean", "sqrtn", and "sum" -- these are
+        effectively different ways to do example-level normalization, which can
+        be useful for bag-of-words features. For more details, see
+        `tf.feature_column.linear_model`.
+      warm_start_from: A string filepath to a checkpoint to warm-start from, or
+        a `WarmStartSettings` object to fully configure warm-starting.  If the
+        string filepath is provided instead of a `WarmStartSettings`, then all
+        weights and biases are warm-started, and it is assumed that vocabularies
+        and Tensor names are unchanged.
+    """
+
+    def _model_fn(features, labels, mode, config):
+      return _linear_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          config=config,
+          sparse_combiner=sparse_combiner)
+
+    estimator._canned_estimator_api_gauge.get_cell('Estimator').set('Linear')  # pylint: disable=protected-access
+    super(LinearEstimatorV2, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config,
+        warm_start_from=warm_start_from)
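+
+# Illustrative sketch (assumed usage): pairing the estimator with a regression
+# head, which the docstring example (using `MultiLabelHead`) does not show.
+# The feature column is hypothetical; `regression_head.RegressionHead` is the
+# head class used elsewhere in this module.
+#
+#   est = LinearEstimatorV2(
+#       head=regression_head.RegressionHead(label_dimension=2),
+#       feature_columns=[tf.feature_column.numeric_column('x', shape=(3,))],
+#       optimizer='Ftrl')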
+
+
+@estimator_export(v1=['estimator.LinearEstimator'])  # pylint: disable=missing-docstring
+class LinearEstimator(estimator.Estimator):
+  __doc__ = LinearEstimatorV2.__doc__
+
+  def __init__(self,
+               head,
+               feature_columns,
+               model_dir=None,
+               optimizer='Ftrl',
+               config=None,
+               partitioner=None,
+               sparse_combiner='sum',
+               warm_start_from=None):
+    """Initializes a `LinearEstimator` instance.
+
+    Args:
+      head: A `_Head` instance constructed with a method such as
+        `tf.contrib.estimator.multi_label_head`.
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `FeatureColumn`.
+      model_dir: Directory to save model parameters, graph, etc. This can also
+        be used to load checkpoints from the directory into an estimator to
+        continue training a previously saved model.
+      optimizer: An instance of `tf.Optimizer` used to train the model. Can also
+        be a string (one of 'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or
+        callable. Defaults to FTRL optimizer.
+      config: `RunConfig` object to configure the runtime settings.
+      partitioner: Optional. Partitioner for input layer.
+      sparse_combiner: A string specifying how to reduce if a categorical column
+        is multivalent.  One of "mean", "sqrtn", and "sum" -- these are
+        effectively different ways to do example-level normalization, which can
+        be useful for bag-of-words features. For more details, see
+        `tf.feature_column.linear_model`.
+      warm_start_from: A string filepath to a checkpoint to warm-start from, or
+        a `WarmStartSettings` object to fully configure warm-starting.  If the
+        string filepath is provided instead of a `WarmStartSettings`, then all
+        weights and biases are warm-started, and it is assumed that vocabularies
+        and Tensor names are unchanged.
+    """
+
+    def _model_fn(features, labels, mode, config):
+      return _linear_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          partitioner=partitioner,
+          config=config,
+          sparse_combiner=sparse_combiner)
+
+    estimator._canned_estimator_api_gauge.get_cell('Estimator').set('Linear')  # pylint: disable=protected-access
+    super(LinearEstimator, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config,
+        warm_start_from=warm_start_from)
+
+
+def _validate_linear_sdca_optimizer_for_linear_regressor(
+    feature_columns, label_dimension, optimizer, sparse_combiner):
+  """Helper function for the initialization of LinearRegressor."""
+  if isinstance(optimizer, LinearSDCA):
+    if sparse_combiner != 'sum':
+      raise ValueError('sparse_combiner must be "sum" when optimizer '
+                       'is a LinearSDCA object.')
+    if not feature_column_lib.is_feature_column_v2(feature_columns):
+      raise ValueError('V2 feature columns required when optimizer '
+                       'is a LinearSDCA object.')
+    if label_dimension > 1:
+      raise ValueError('LinearSDCA can only be used with one-dimensional '
+                       'label.')
+
+
+@estimator_export('estimator.LinearRegressor', v1=[])
+class LinearRegressorV2(estimator.EstimatorV2):
+  """An estimator for TensorFlow Linear regression problems.
+
+  Train a linear regression model to predict label value given observation of
+  feature values.
+
+  Example:
+
+  ```python
+  categorical_column_a = categorical_column_with_hash_bucket(...)
+  categorical_column_b = categorical_column_with_hash_bucket(...)
+
+  categorical_feature_a_x_categorical_feature_b = crossed_column(...)
+
+  # Estimator using the default optimizer.
+  estimator = tf.estimator.LinearRegressor(
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b])
+
+  # Or estimator using the FTRL optimizer with regularization.
+  estimator = tf.estimator.LinearRegressor(
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b],
+      optimizer=tf.keras.optimizers.Ftrl(
+        learning_rate=0.1,
+        l1_regularization_strength=0.001
+      ))
+
+  # Or estimator using an optimizer with a learning rate decay.
+  estimator = tf.estimator.LinearRegressor(
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b],
+      optimizer=lambda: tf.keras.optimizers.Ftrl(
+          learning_rate=tf.compat.v1.train.exponential_decay(
+              learning_rate=0.1,
+              global_step=tf.compat.v1.train.get_global_step(),
+              decay_steps=10000,
+              decay_rate=0.96)))
+
+  # Or estimator with warm-starting from a previous checkpoint.
+  estimator = tf.estimator.LinearRegressor(
+      feature_columns=[categorical_column_a,
+                       categorical_feature_a_x_categorical_feature_b],
+      warm_start_from="/path/to/checkpoint/dir")
+
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents the label
+    # value.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents the label
+    # value.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train)
+  metrics = estimator.evaluate(input_fn=input_fn_eval)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+    otherwise there will be a `KeyError`:
+
+  * if `weight_column` is not `None`, a feature with `key=weight_column` whose
+    value is a `Tensor`.
+  * for each `column` in `feature_columns`:
+    - if `column` is a `SparseColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedSparseColumn`, two features: the first with
+      `key` the id column name, the second with `key` the weight column name.
+      Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `RealValuedColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
+  Loss is calculated by using mean squared error.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               feature_columns,
+               model_dir=None,
+               label_dimension=1,
+               weight_column=None,
+               optimizer='Ftrl',
+               config=None,
+               warm_start_from=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               sparse_combiner='sum'):
+    """Initializes a `LinearRegressor` instance.
+
+    Args:
+      feature_columns: An iterable containing all the feature columns used by
+        the model. All items in the set should be instances of classes derived
+        from `FeatureColumn`.
+      model_dir: Directory to save model parameters, graph, etc. This can also
+        be used to load checkpoints from the directory into an estimator to
+        continue training a previously saved model.
+      label_dimension: Number of regression targets per example. This is the
+        size of the last dimension of the labels and logits `Tensor` objects
+        (typically, these have shape `[batch_size, label_dimension]`).
+      weight_column: A string or a `NumericColumn` created by
+        `tf.feature_column.numeric_column` defining the feature column
+        representing weights. It is used to down-weight or boost examples
+        during training, and is multiplied by the loss of the example. If it is
+        a string, it is used as a key to fetch the weight tensor from the
+        `features`. If it is a `NumericColumn`, the raw tensor is fetched by
+        key `weight_column.key`, then `weight_column.normalizer_fn` is applied
+        on it to get the weight tensor.
+      optimizer: An instance of `tf.keras.optimizers.*` or
+        `tf.estimator.experimental.LinearSDCA` used to train the model. Can also
+        be a string (one of 'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or
+        callable. Defaults to FTRL optimizer.
+      config: `RunConfig` object to configure the runtime settings.
+      warm_start_from: A string filepath to a checkpoint to warm-start from, or
+        a `WarmStartSettings` object to fully configure warm-starting.  If the
+        string filepath is provided instead of a `WarmStartSettings`, then all
+        weights and biases are warm-started, and it is assumed that vocabularies
+        and Tensor names are unchanged.
+      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+        to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+      sparse_combiner: A string specifying how to reduce if a categorical column
+        is multivalent.  One of "mean", "sqrtn", and "sum" -- these are
+        effectively different ways to do example-level normalization, which can
+        be useful for bag-of-words features. For more details, see
+        `tf.feature_column.linear_model`.
+    """
+    _validate_linear_sdca_optimizer_for_linear_regressor(
+        feature_columns=feature_columns,
+        label_dimension=label_dimension,
+        optimizer=optimizer,
+        sparse_combiner=sparse_combiner)
+
+    head = regression_head.RegressionHead(
+        label_dimension=label_dimension,
+        weight_column=weight_column,
+        loss_reduction=loss_reduction)
+    estimator._canned_estimator_api_gauge.get_cell('Regressor').set('Linear')  # pylint: disable=protected-access
+
+    def _model_fn(features, labels, mode, config):
+      """Call the defined shared _linear_model_fn."""
+      return _linear_model_fn_v2(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          config=config,
+          sparse_combiner=sparse_combiner)
+
+    super(LinearRegressorV2, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
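+
+# Illustrative sketch (assumed usage): a minimal end-to-end run of the
+# regressor with a `tf.data` input_fn; the feature name and data are
+# hypothetical.
+#
+#   reg = LinearRegressorV2(
+#       feature_columns=[tf.feature_column.numeric_column('x')])
+#
+#   def train_input_fn():
+#     features = {'x': tf.constant([[1.0], [2.0]])}
+#     labels = tf.constant([[3.0], [5.0]])
+#     return tf.data.Dataset.from_tensors((features, labels)).repeat()
+#
+#   reg.train(input_fn=train_input_fn, steps=100)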
+
+
+@estimator_export(v1=['estimator.LinearRegressor'])  # pylint: disable=missing-docstring
+class LinearRegressor(estimator.Estimator):
+  __doc__ = LinearRegressorV2.__doc__.replace('SUM_OVER_BATCH_SIZE', 'SUM')
+
+  def __init__(self,
+               feature_columns,
+               model_dir=None,
+               label_dimension=1,
+               weight_column=None,
+               optimizer='Ftrl',
+               config=None,
+               partitioner=None,
+               warm_start_from=None,
+               loss_reduction=tf.compat.v1.losses.Reduction.SUM,
+               sparse_combiner='sum'):
+    _validate_linear_sdca_optimizer_for_linear_regressor(
+        feature_columns=feature_columns,
+        label_dimension=label_dimension,
+        optimizer=optimizer,
+        sparse_combiner=sparse_combiner)
+
+    head = head_lib._regression_head(  # pylint: disable=protected-access
+        label_dimension=label_dimension,
+        weight_column=weight_column,
+        loss_reduction=loss_reduction)
+    estimator._canned_estimator_api_gauge.get_cell('Regressor').set('Linear')  # pylint: disable=protected-access
+
+    def _model_fn(features, labels, mode, config):
+      """Call the defined shared _linear_model_fn."""
+      return _linear_model_fn(
+          features=features,
+          labels=labels,
+          mode=mode,
+          head=head,
+          feature_columns=tuple(feature_columns or []),
+          optimizer=optimizer,
+          partitioner=partitioner,
+          config=config,
+          sparse_combiner=sparse_combiner)
+
+    super(LinearRegressor, self).__init__(
+        model_fn=_model_fn,
+        model_dir=model_dir,
+        config=config,
+        warm_start_from=warm_start_from)
+
+
+class _LinearModelLayer(tf.keras.layers.Layer):
+  """Layer that contains logic for `LinearModel`."""
+
+  def __init__(self,
+               feature_columns,
+               units=1,
+               sparse_combiner='sum',
+               trainable=True,
+               name=None,
+               **kwargs):
+    super(_LinearModelLayer, self).__init__(
+        name=name, trainable=trainable, **kwargs)
+
+    self._feature_columns = fc_v2._normalize_feature_columns(feature_columns)  # pylint: disable=protected-access
+    for column in self._feature_columns:
+      if not isinstance(column, (fc_v2.DenseColumn, fc_v2.CategoricalColumn)):
+        raise ValueError(
+            'Items of feature_columns must be either a '
+            'DenseColumn or CategoricalColumn. Given: {}'.format(column))
+
+    self._units = units
+    self._sparse_combiner = sparse_combiner
+
+    self._state_manager = fc_v2._StateManagerImpl(self, self.trainable)  # pylint: disable=protected-access
+    self.bias = None
+
+  def build(self, _):
+    # We need variable scopes for now because we want the variable partitioning
+    # information to percolate down. We also use `_pure_variable_scope`s here
+    # since we want to open up a name_scope in the `call` method while creating
+    # the ops.
+    with variable_scope._pure_variable_scope(self.name):  # pylint: disable=protected-access
+      for column in self._feature_columns:
+        with variable_scope._pure_variable_scope(  # pylint: disable=protected-access
+            fc_v2._sanitize_column_name_for_variable_scope(column.name)):  # pylint: disable=protected-access
+          # Create the state for each feature column
+          column.create_state(self._state_manager)
+
+          # Create a weight variable for each column.
+          if isinstance(column, fc_v2.CategoricalColumn):
+            first_dim = column.num_buckets
+          else:
+            first_dim = column.variable_shape.num_elements()
+          self._state_manager.create_variable(
+              column,
+              name='weights',
+              dtype=tf.float32,
+              shape=(first_dim, self._units),
+              initializer=tf.keras.initializers.zeros(),
+              trainable=self.trainable)
+
+      # Create a bias variable.
+      self.bias = self.add_variable(
+          name='bias_weights',
+          dtype=tf.float32,
+          shape=[self._units],
+          initializer=tf.keras.initializers.zeros(),
+          trainable=self.trainable,
+          use_resource=True,
+          # TODO(rohanj): Get rid of this hack once we have a mechanism for
+          # specifying a default partitioner for an entire layer. In that case,
+          # the default getter for Layers should work.
+          getter=variable_scope.get_variable)
+
+    super(_LinearModelLayer, self).build(None)
+
+  def call(self, features):
+    if not isinstance(features, dict):
+      raise ValueError('We expected a dictionary here. Instead we got: {}'
+                       .format(features))
+    with ops.name_scope(self.name):
+      transformation_cache = fc_v2.FeatureTransformationCache(features)
+      weighted_sums = []
+      for column in self._feature_columns:
+        with ops.name_scope(
+            fc_v2._sanitize_column_name_for_variable_scope(column.name)):  # pylint: disable=protected-access
+          # All the weights used in the linear model are owned by the state
+          # manager associated with this Linear Model.
+          weight_var = self._state_manager.get_variable(column, 'weights')
+
+          weighted_sum = fc_v2._create_weighted_sum(  # pylint: disable=protected-access
+              column=column,
+              transformation_cache=transformation_cache,
+              state_manager=self._state_manager,
+              sparse_combiner=self._sparse_combiner,
+              weight_var=weight_var)
+          weighted_sums.append(weighted_sum)
+
+      fc_v2._verify_static_batch_size_equality(  # pylint: disable=protected-access
+          weighted_sums, self._feature_columns)
+      predictions_no_bias = tf.math.add_n(
+          weighted_sums, name='weighted_sum_no_bias')
+      predictions = tf.nn.bias_add(
+          predictions_no_bias, self.bias, name='weighted_sum')
+      return predictions
+
+  def get_config(self):
+    # Import here to avoid circular imports.
+    from tensorflow.python.feature_column import serialization  # pylint: disable=g-import-not-at-top
+    column_configs = serialization.serialize_feature_columns(
+        self._feature_columns)
+    config = {
+        'feature_columns': column_configs,
+        'units': self._units,
+        'sparse_combiner': self._sparse_combiner
+    }
+
+    base_config = super(  # pylint: disable=bad-super-call
+        _LinearModelLayer, self).get_config()
+    return dict(list(base_config.items()) + list(config.items()))
+
+  @classmethod
+  def from_config(cls, config, custom_objects=None):
+    # Import here to avoid circular imports.
+    from tensorflow.python.feature_column import serialization  # pylint: disable=g-import-not-at-top
+    config_cp = config.copy()
+    columns = serialization.deserialize_feature_columns(
+        config_cp['feature_columns'], custom_objects=custom_objects)
+
+    del config_cp['feature_columns']
+    return cls(feature_columns=columns, **config_cp)
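+
+# Conceptually, `_LinearModelLayer.call` computes, for each feature tensor x_i
+# with per-column weight variable W_i and the shared bias b:
+#
+#   logits = sum_i combine(x_i) @ W_i + b    # shape: (batch_size, units)
+#
+# where `combine` is the identity for dense columns and the `sparse_combiner`
+# reduction ("sum", "mean" or "sqrtn") for categorical columns. A numeric
+# sketch with a single dense column (hypothetical values):
+#
+#   x = tf.constant([[2.0], [4.0]])    # one numeric feature, batch of 2
+#   W = tf.constant([[3.0]])           # weights, shape (1, units=1)
+#   b = tf.constant([0.5])             # bias, shape (units,)
+#   logits = tf.matmul(x, W) + b       # [[6.5], [12.5]]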
+
+
+class LinearModel(tf.keras.Model):
+  """Produces a linear prediction `Tensor` based on given `feature_columns`.
+
+  This layer generates a weighted sum based on output dimension `units`.
+  Weighted sum refers to logits in classification problems. It refers to the
+  prediction itself for linear regression problems.
+
+  Note on supported columns: `LinearModel` treats categorical columns as
+  `indicator_column`s. To be specific, assume the input as `SparseTensor` looks
+  like:
+
+  ```python
+    shape = [2, 2]
+    {
+        [0, 0]: "a"
+        [1, 0]: "b"
+        [1, 1]: "c"
+    }
+  ```
+  `linear_model` assigns weights for the presence of "a", "b", "c" implicitly,
+  just like `indicator_column`, while `input_layer` explicitly requires wrapping
+  each of the categorical columns with an `embedding_column` or an
+  `indicator_column`.
+
+  Example of usage:
+
+  ```python
+  price = numeric_column('price')
+  price_buckets = bucketized_column(price, boundaries=[0., 10., 100., 1000.])
+  keywords = categorical_column_with_hash_bucket("keywords", 10000)
+  keywords_price = crossed_column([keywords, price_buckets], ...)
+  columns = [price_buckets, keywords, keywords_price, ...]
+  linear_model = LinearModel(columns)
+
+  features = tf.io.parse_example(..., features=make_parse_example_spec(columns))
+  prediction = linear_model(features)
+  ```
+  """
+
+  def __init__(self,
+               feature_columns,
+               units=1,
+               sparse_combiner='sum',
+               trainable=True,
+               name=None,
+               **kwargs):
+    """Constructs a LinearLayer.
+
+    Args:
+      feature_columns: An iterable containing the FeatureColumns to use as
+        inputs to your model. All items should be instances of classes derived
+        from `_FeatureColumn`s.
+      units: An integer, dimensionality of the output space. Default value is 1.
+      sparse_combiner: A string specifying how to reduce if a categorical column
+        is multivalent. Except `numeric_column`, almost all columns passed to
+        `linear_model` are considered as categorical columns.  It combines each
+        categorical column independently. Currently "mean", "sqrtn" and "sum"
+        are supported, with "sum" the default for linear model. "sqrtn" often
+        achieves good accuracy, in particular with bag-of-words columns.
+          * "sum": do not normalize features in the column
+          * "mean": do l1 normalization on features in the column
+          * "sqrtn": do l2 normalization on features in the column
+        For example, for two features represented as the categorical columns:
+
+          ```python
+          # Feature 1
+
+          shape = [2, 2]
+          {
+              [0, 0]: "a"
+              [0, 1]: "b"
+              [1, 0]: "c"
+          }
+
+          # Feature 2
+
+          shape = [2, 3]
+          {
+              [0, 0]: "d"
+              [1, 0]: "e"
+              [1, 1]: "f"
+              [1, 2]: "g"
+          }
+          ```
+
+        with `sparse_combiner` as "mean", the linear model outputs conceptually
+        are
+        ```
+        y_0 = 1.0 / 2.0 * (w_a + w_b) + w_c + b_0
+        y_1 = w_d + 1.0 / 3.0 * (w_e + w_f + w_g) + b_1
+        ```
+        where `y_i` is the output, `b_i` is the bias, and `w_x` is the weight
+        assigned to the presence of `x` in the input features.
+      trainable: If `True` also add the variable to the graph collection
+        `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
+      name: Name to give to the Linear Model. All variables and ops created will
+        be scoped by this name.
+      **kwargs: Keyword arguments to construct a layer.
+
+    Raises:
+      ValueError: if an item in `feature_columns` is neither a `DenseColumn`
+        nor `CategoricalColumn`.
+    """
+
+    super(LinearModel, self).__init__(name=name, **kwargs)
+    self.layer = _LinearModelLayer(
+        feature_columns,
+        units,
+        sparse_combiner,
+        trainable,
+        name=self.name,
+        **kwargs)
+
+  def call(self, features):
+    """Returns a `Tensor` the represents the predictions of a linear model.
+
+    Args:
+      features: A mapping from key to tensors. `_FeatureColumn`s look up via
+        these keys. For example `numeric_column('price')` will look at 'price'
+        key in this dict. Values are `Tensor` or `SparseTensor` depending on
+        corresponding `_FeatureColumn`.
+
+    Returns:
+      A `Tensor` which represents predictions/logits of a linear model. Its
+      shape is (batch_size, units) and its dtype is `float32`.
+
+    Raises:
+      ValueError: If features are not a dictionary.
+    """
+    return self.layer(features)
+
+  @property
+  def bias(self):
+    return self.layer.bias
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear_testing_utils.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear_testing_utils.py
new file mode 100644
index 00000000..31791798
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/linear_testing_utils.py
@@ -0,0 +1,2239 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Utils for testing linear estimators."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import math
+import os
+import shutil
+import tempfile
+
+import numpy as np
+import six
+import tensorflow as tf
+from tensorflow.core.example import example_pb2
+from tensorflow.core.example import feature_pb2
+from tensorflow.python.feature_column import feature_column_v2
+from tensorflow.python.framework import ops
+from tensorflow.python.keras.optimizer_v2 import gradient_descent
+from tensorflow.python.keras.optimizer_v2 import optimizer_v2
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator.canned import linear
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.export import export
+from tensorflow_estimator.python.estimator.inputs import numpy_io
+from tensorflow_estimator.python.estimator.inputs import pandas_io
+
+try:
+  # pylint: disable=g-import-not-at-top
+  import pandas as pd
+  HAS_PANDAS = True
+except IOError:
+  # Pandas writes a temporary file during import. If it fails, don't use pandas.
+  HAS_PANDAS = False
+except ImportError:
+  HAS_PANDAS = False
+
+# pylint rules which are disabled by default for test files.
+# pylint: disable=invalid-name,protected-access,missing-docstring
+
+# Names of variables created by model.
+AGE_WEIGHT_NAME = 'linear/linear_model/age/weights'
+HEIGHT_WEIGHT_NAME = 'linear/linear_model/height/weights'
+OCCUPATION_WEIGHT_NAME = 'linear/linear_model/occupation/weights'
+BIAS_NAME = 'linear/linear_model/bias_weights'
+LANGUAGE_WEIGHT_NAME = 'linear/linear_model/language/weights'
+
+
+def assert_close(expected, actual, rtol=1e-04, name='assert_close'):
+  with ops.name_scope(name, 'assert_close', (expected, actual, rtol)) as scope:
+    expected = ops.convert_to_tensor(expected, name='expected')
+    actual = ops.convert_to_tensor(actual, name='actual')
+    rdiff = tf.math.abs(expected - actual, 'diff') / tf.math.abs(expected)
+    rtol = ops.convert_to_tensor(rtol, name='rtol')
+    return tf.compat.v1.debugging.assert_less(
+        rdiff,
+        rtol,
+        data=('Condition expected =~ actual did not hold element-wise: '
+              'expected = ', expected, 'actual = ', actual, 'rdiff = ', rdiff,
+              'rtol = ', rtol,),
+        name=scope)
+
+
+def save_variables_to_ckpt(model_dir):
+  init_all_op = [tf.compat.v1.initializers.global_variables()]
+  with tf.compat.v1.Session() as sess:
+    sess.run(init_all_op)
+    tf.compat.v1.train.Saver().save(sess, os.path.join(model_dir, 'model.ckpt'))
+
+
+def queue_parsed_features(feature_map):
+  tensors_to_enqueue = []
+  keys = []
+  for key, tensor in six.iteritems(feature_map):
+    keys.append(key)
+    tensors_to_enqueue.append(tensor)
+  queue_dtypes = [x.dtype for x in tensors_to_enqueue]
+  input_queue = tf.queue.FIFOQueue(capacity=100, dtypes=queue_dtypes)
+  tf.compat.v1.train.queue_runner.add_queue_runner(
+      tf.compat.v1.train.queue_runner.QueueRunner(
+          input_queue, [input_queue.enqueue(tensors_to_enqueue)]))
+  dequeued_tensors = input_queue.dequeue()
+  return {keys[i]: dequeued_tensors[i] for i in range(len(dequeued_tensors))}
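+
+# Illustrative sketch (assumed usage): `queue_parsed_features` round-trips a
+# feature map through the FIFO queue, so tests consume features one dequeue at
+# a time. Note the queue runner only executes inside a v1 session after
+# `tf.compat.v1.train.start_queue_runners`. The feature map below is
+# hypothetical.
+#
+#   feature_map = {'age': tf.constant([25.0]), 'height': tf.constant([6.0])}
+#   dequeued = queue_parsed_features(feature_map)  # same keys, queued tensors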
+
+
+def sorted_key_dict(unsorted_dict):
+  return {k: unsorted_dict[k] for k in sorted(unsorted_dict)}
+
+
+def sigmoid(x):
+  return 1 / (1 + np.exp(-1.0 * x))
+
+
+def mock_optimizer(testcase, expected_loss=None):
+  expected_var_names = ['%s:0' % AGE_WEIGHT_NAME, '%s:0' % BIAS_NAME]
+
+  class _Optimizer(optimizer_v2.OptimizerV2):
+
+    def get_updates(self, loss, params):
+      trainable_vars = params
+      testcase.assertItemsEqual(expected_var_names,
+                                [var.name for var in trainable_vars])
+
+      # Verify loss. We can't check the value directly, so we add an assert op.
+      testcase.assertEqual(0, loss.shape.ndims)
+      if expected_loss is None:
+        if self.iterations is not None:
+          return [self.iterations.assign_add(1).op]
+        return [tf.no_op()]
+      assert_loss = assert_close(
+          tf.cast(expected_loss, name='expected', dtype=tf.dtypes.float32),
+          loss,
+          name='assert_loss')
+      with tf.control_dependencies((assert_loss,)):
+        if self.iterations is not None:
+          return [self.iterations.assign_add(1).op]
+        return [tf.no_op()]
+
+    def get_config(self):
+      config = super(_Optimizer, self).get_config()
+      return config
+
+  optimizer = _Optimizer(name='my_optimizer')
+
+  return optimizer
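+
+# Illustrative sketch (assumed usage): inside a test case, the mock optimizer
+# checks the trainable-variable names and, optionally, the loss value on every
+# update. The input_fn is elided.
+#
+#   opt = mock_optimizer(self, expected_loss=9.)
+#   regressor = linear.LinearRegressorV2(
+#       feature_columns=[tf.feature_column.numeric_column('age')],
+#       optimizer=opt)
+#   regressor.train(input_fn=..., steps=1)  # fails if loss deviates from 9.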
+
+
+# TODO(b/36813849): Add tests with dynamic shape inputs using placeholders.
+class BaseLinearRegressorEvaluationTest(object):
+
+  def __init__(self, linear_regressor_fn, fc_lib=feature_column_v2):
+    self._linear_regressor_fn = linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_evaluation_for_simple_data(self):
+    with tf.Graph().as_default():
+      tf.Variable([[11.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([2.0], name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir)
+    eval_metrics = linear_regressor.evaluate(
+        input_fn=lambda: ({
+            'age': ((1,),)
+        }, ((10.,),)), steps=1)
+
+    # Logit is (1. * 11.0 + 2.0) = 13, while label is 10. Loss is 3**2 = 9.
+    self.assertDictEqual(
+        {
+            metric_keys.MetricKeys.LOSS: 9.,
+            metric_keys.MetricKeys.LOSS_MEAN: 9.,
+            metric_keys.MetricKeys.PREDICTION_MEAN: 13.,
+            metric_keys.MetricKeys.LABEL_MEAN: 10.,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: 100
+        }, eval_metrics)
+
+  def test_evaluation_batch(self):
+    """Tests evaluation for batch_size==2."""
+    with tf.Graph().as_default():
+      tf.Variable([[11.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([2.0], name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir)
+    eval_metrics = linear_regressor.evaluate(
+        input_fn=lambda: ({
+            'age': ((1,), (1,))
+        }, ((10.,), (10.,))), steps=1)
+
+    # Logit is (1. * 11.0 + 2.0) = 13, while label is 10.
+    # Loss per example is 3**2 = 9.
+    # Training loss is the sum over the batch divided by the batch size =
+    #     (9 + 9) / 2 = 9
+    # Average loss is the average over batch = 9
+    self.assertDictEqual(
+        {
+            metric_keys.MetricKeys.LOSS: 9.,
+            metric_keys.MetricKeys.LOSS_MEAN: 9.,
+            metric_keys.MetricKeys.PREDICTION_MEAN: 13.,
+            metric_keys.MetricKeys.LABEL_MEAN: 10.,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: 100
+        }, eval_metrics)
+
+  def test_evaluation_weights(self):
+    """Tests evaluation with weights."""
+    with tf.Graph().as_default():
+      tf.Variable([[11.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([2.0], name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    def _input_fn():
+      features = {'age': ((1,), (1,)), 'weights': ((1.,), (2.,))}
+      labels = ((10.,), (10.,))
+      return features, labels
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        weight_column='weights',
+        model_dir=self._model_dir)
+    eval_metrics = linear_regressor.evaluate(input_fn=_input_fn, steps=1)
+
+    # Logit is (1. * 11.0 + 2.0) = 13, while label is 10.
+    # Loss per example is 3**2 = 9.
+    # Training loss is the weighted sum over batch / batch size =
+    #     (9 + 2*9) / 2 = 13.5
+    # Average loss is the weighted average = (9 + 2*9) / (1 + 2) = 9
+    self.assertDictEqual(
+        {
+            metric_keys.MetricKeys.LOSS: 13.5,
+            metric_keys.MetricKeys.LOSS_MEAN: 9.,
+            metric_keys.MetricKeys.PREDICTION_MEAN: 13.,
+            metric_keys.MetricKeys.LABEL_MEAN: 10.,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: 100
+        }, eval_metrics)
+
+  def test_evaluation_for_multi_dimensions(self):
+    x_dim = 3
+    label_dim = 2
+    with tf.Graph().as_default():
+      tf.Variable([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([7.0, 8.0], name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age', shape=(x_dim,)),),
+        label_dimension=label_dim,
+        model_dir=self._model_dir)
+    input_fn = numpy_io.numpy_input_fn(
+        x={
+            'age': np.array([[2., 4., 5.]]),
+        },
+        y=np.array([[46., 58.]]),
+        batch_size=1,
+        num_epochs=None,
+        shuffle=False)
+    eval_metrics = linear_regressor.evaluate(input_fn=input_fn, steps=1)
+
+    self.assertItemsEqual(
+        (metric_keys.MetricKeys.LOSS, metric_keys.MetricKeys.LOSS_MEAN,
+         metric_keys.MetricKeys.PREDICTION_MEAN,
+         metric_keys.MetricKeys.LABEL_MEAN, tf.compat.v1.GraphKeys.GLOBAL_STEP),
+        eval_metrics.keys())
+
+    # Logit is
+    #   [2., 4., 5.] * [1.0, 2.0] + [7.0, 8.0] = [39, 50] + [7.0, 8.0]
+    #                  [3.0, 4.0]
+    #                  [5.0, 6.0]
+    # which is [46, 58]
+    self.assertAlmostEqual(0, eval_metrics[metric_keys.MetricKeys.LOSS])
+
+  def test_evaluation_for_multiple_feature_columns(self):
+    with tf.Graph().as_default():
+      tf.Variable([[10.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([[2.0]], name=HEIGHT_WEIGHT_NAME)
+      tf.Variable([5.0], name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    batch_size = 2
+    feature_columns = [
+        self._fc_lib.numeric_column('age'),
+        self._fc_lib.numeric_column('height')
+    ]
+    input_fn = numpy_io.numpy_input_fn(
+        x={
+            'age': np.array([20, 40]),
+            'height': np.array([4, 8])
+        },
+        y=np.array([[213.], [421.]]),
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=False)
+
+    est = self._linear_regressor_fn(
+        feature_columns=feature_columns, model_dir=self._model_dir)
+
+    eval_metrics = est.evaluate(input_fn=input_fn, steps=1)
+    self.assertItemsEqual(
+        (metric_keys.MetricKeys.LOSS, metric_keys.MetricKeys.LOSS_MEAN,
+         metric_keys.MetricKeys.PREDICTION_MEAN,
+         metric_keys.MetricKeys.LABEL_MEAN, tf.compat.v1.GraphKeys.GLOBAL_STEP),
+        eval_metrics.keys())
+
+    # Logit is [(20. * 10.0 + 4 * 2.0 + 5.0), (40. * 10.0 + 8 * 2.0 + 5.0)] =
+    # [213.0, 421.0], while label is [213., 421.]. Loss = 0.
+    self.assertAlmostEqual(0, eval_metrics[metric_keys.MetricKeys.LOSS])
+
+  def test_evaluation_for_multiple_feature_columns_mix(self):
+    with tf.Graph().as_default():
+      tf.Variable([[10.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([[2.0]], name=HEIGHT_WEIGHT_NAME)
+      tf.Variable([5.0], name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    batch_size = 2
+    feature_columns = [
+        tf.feature_column.numeric_column('age'),
+        tf.feature_column.numeric_column('height')
+    ]
+
+    def _input_fn():
+      features_ds = tf.compat.v1.data.Dataset.from_tensor_slices({
+          'age': np.array([20, 40]),
+          'height': np.array([4, 8])
+      })
+      labels_ds = tf.compat.v1.data.Dataset.from_tensor_slices(
+          np.array([[213.], [421.]]))
+      return (tf.compat.v1.data.Dataset.zip(
+          (features_ds, labels_ds)).batch(batch_size).repeat(None))
+
+    est = self._linear_regressor_fn(
+        feature_columns=feature_columns, model_dir=self._model_dir)
+
+    eval_metrics = est.evaluate(input_fn=_input_fn, steps=1)
+    self.assertItemsEqual(
+        (metric_keys.MetricKeys.LOSS, metric_keys.MetricKeys.LOSS_MEAN,
+         metric_keys.MetricKeys.PREDICTION_MEAN,
+         metric_keys.MetricKeys.LABEL_MEAN, tf.compat.v1.GraphKeys.GLOBAL_STEP),
+        eval_metrics.keys())
+
+    # Logit is [(20. * 10.0 + 4 * 2.0 + 5.0), (40. * 10.0 + 8 * 2.0 + 5.0)] =
+    # [213.0, 421.0], while label is [213., 421.]. Loss = 0.
+    self.assertAlmostEqual(0, eval_metrics[metric_keys.MetricKeys.LOSS])
+
+
+class BaseLinearRegressorPredictTest(object):
+
+  def __init__(self, linear_regressor_fn, fc_lib=feature_column_v2):
+    self._linear_regressor_fn = linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_1d(self):
+    """Tests predict when all variables are one-dimensional."""
+    with tf.Graph().as_default():
+      tf.Variable([[10.]], name='linear/linear_model/x/weights')
+      tf.Variable([.2], name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('x'),),
+        model_dir=self._model_dir)
+
+    predict_input_fn = numpy_io.numpy_input_fn(
+        x={'x': np.array([[2.]])},
+        y=None,
+        batch_size=1,
+        num_epochs=1,
+        shuffle=False)
+    predictions = linear_regressor.predict(input_fn=predict_input_fn)
+    predicted_scores = list([x['predictions'] for x in predictions])
+    # x * weight + bias = 2. * 10. + .2 = 20.2
+    self.assertAllClose([[20.2]], predicted_scores)
+
+  def testMultiDim(self):
+    """Tests predict when all variables are multi-dimenstional."""
+    batch_size = 2
+    label_dimension = 3
+    x_dim = 4
+    feature_columns = (self._fc_lib.numeric_column('x', shape=(x_dim,)),)
+    with tf.Graph().as_default():
+      tf.Variable(  # shape=[x_dim, label_dimension]
+          [[1., 2., 3.], [2., 3., 4.], [3., 4., 5.], [4., 5., 6.]],
+          name='linear/linear_model/x/weights')
+      tf.Variable(  # shape=[label_dimension]
+          [.2, .4, .6], name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        label_dimension=label_dimension,
+        model_dir=self._model_dir)
+
+    predict_input_fn = numpy_io.numpy_input_fn(
+        # x shape=[batch_size, x_dim]
+        x={'x': np.array([[1., 2., 3., 4.], [5., 6., 7., 8.]])},
+        y=None,
+        batch_size=batch_size,
+        num_epochs=1,
+        shuffle=False)
+    predictions = linear_regressor.predict(input_fn=predict_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    # score = x * weight + bias, shape=[batch_size, label_dimension]
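+    # e.g. first row: [1., 2., 3., 4.] @ weights + bias
+    #   = [30., 40., 50.] + [.2, .4, .6] = [30.2, 40.4, 50.6]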
+    self.assertAllClose([[30.2, 40.4, 50.6], [70.2, 96.4, 122.6]],
+                        predicted_scores)
+
+  def testTwoFeatureColumns(self):
+    """Tests predict with two feature columns."""
+    with tf.Graph().as_default():
+      tf.Variable([[10.]], name='linear/linear_model/x0/weights')
+      tf.Variable([[20.]], name='linear/linear_model/x1/weights')
+      tf.Variable([.2], name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('x0'),
+                         self._fc_lib.numeric_column('x1')),
+        model_dir=self._model_dir)
+
+    predict_input_fn = numpy_io.numpy_input_fn(
+        x={
+            'x0': np.array([[2.]]),
+            'x1': np.array([[3.]])
+        },
+        y=None,
+        batch_size=1,
+        num_epochs=1,
+        shuffle=False)
+    predictions = linear_regressor.predict(input_fn=predict_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    # x0 * weight0 + x1 * weight1 + bias = 2. * 10. + 3. * 20. + .2 = 80.2
+    self.assertAllClose([[80.2]], predicted_scores)
+
+  def testTwoFeatureColumnsMix(self):
+    """Tests predict with two feature columns."""
+    with tf.Graph().as_default():
+      tf.Variable([[10.]], name='linear/linear_model/x0/weights')
+      tf.Variable([[20.]], name='linear/linear_model/x1/weights')
+      tf.Variable([.2], name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(tf.feature_column.numeric_column('x0'),
+                         tf.feature_column.numeric_column('x1')),
+        model_dir=self._model_dir)
+
+    def _predict_input_fn():
+      return tf.compat.v1.data.Dataset.from_tensor_slices({
+          'x0': np.array([[2.]]),
+          'x1': np.array([[3.]])
+      }).batch(1)
+
+    predictions = linear_regressor.predict(input_fn=_predict_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    # x0 * weight0 + x1 * weight1 + bias = 2. * 10. + 3. * 20. + .2 = 80.2
+    self.assertAllClose([[80.2]], predicted_scores)
+
+  def testSparseCombiner(self):
+    w_a = 2.0
+    w_b = 3.0
+    w_c = 5.0
+    bias = 5.0
+    with tf.Graph().as_default():
+      tf.Variable([[w_a], [w_b], [w_c]], name=LANGUAGE_WEIGHT_NAME)
+      tf.Variable([bias], name=BIAS_NAME)
+      tf.Variable(
+          1, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    def _input_fn():
+      return tf.compat.v1.data.Dataset.from_tensors({
+          'language':
+              tf.sparse.SparseTensor(
+                  values=['a', 'c', 'b', 'c'],
+                  indices=[[0, 0], [0, 1], [1, 0], [1, 1]],
+                  dense_shape=[2, 2]),
+      })
+
+    feature_columns = (self._fc_lib.categorical_column_with_vocabulary_list(
+        'language', vocabulary_list=['a', 'b', 'c']),)
+
+    # Check prediction for each sparse_combiner.
+    # With sparse_combiner = 'sum', we have
+    # logits_1 = w_a + w_c + bias
+    #          = 2.0 + 5.0 + 5.0 = 12.0
+    # logits_2 = w_b + w_c + bias
+    #          = 3.0 + 5.0 + 5.0 = 13.0
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=feature_columns, model_dir=self._model_dir)
+    predictions = linear_regressor.predict(input_fn=_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    self.assertAllClose([[12.0], [13.0]], predicted_scores)
+
+    # With sparse_combiner = 'mean', we have
+    # logits_1 = 1/2 * (w_a + w_c) + bias
+    #          = 1/2 * (2.0 + 5.0) + 5.0 = 8.5
+    # logits_2 = 1/2 * (w_b + w_c) + bias
+    #          = 1/2 * (3.0 + 5.0) + 5.0 = 9.0
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        model_dir=self._model_dir,
+        sparse_combiner='mean')
+    predictions = linear_regressor.predict(input_fn=_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    self.assertAllClose([[8.5], [9.0]], predicted_scores)
+
+    # With sparse_combiner = 'sqrtn', we have
+    # logits_1 = sqrt(2)/2 * (w_a + w_c) + bias
+    #          = sqrt(2)/2 * (2.0 + 5.0) + 5.0 = 9.94974
+    # logits_2 = sqrt(2)/2 * (w_b + w_c) + bias
+    #          = sqrt(2)/2 * (3.0 + 5.0) + 5.0 = 10.65685
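+    # ('sqrtn' scales each row's weight sum by 1/sqrt(n) for n active
+    # unit-weight entries; with two per row that is 1/sqrt(2) = sqrt(2)/2,
+    # the multiplier used above.)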
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        model_dir=self._model_dir,
+        sparse_combiner='sqrtn')
+    predictions = linear_regressor.predict(input_fn=_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    self.assertAllClose([[9.94974], [10.65685]], predicted_scores)
+
+
+class BaseLinearRegressorIntegrationTest(object):
+
+  def __init__(self, linear_regressor_fn, fc_lib=feature_column_v2):
+    self._linear_regressor_fn = linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def _test_complete_flow(self, train_input_fn, eval_input_fn, predict_input_fn,
+                          input_dimension, label_dimension, prediction_length):
+    feature_columns = [
+        self._fc_lib.numeric_column('x', shape=(input_dimension,))
+    ]
+    est = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        label_dimension=label_dimension,
+        model_dir=self._model_dir)
+
+    # TRAIN
+    # learn y = x
+    est.train(train_input_fn, steps=200)
+
+    # EVALUATE
+    scores = est.evaluate(eval_input_fn)
+    self.assertEqual(200, scores[tf.compat.v1.GraphKeys.GLOBAL_STEP])
+    self.assertIn(metric_keys.MetricKeys.LOSS, six.iterkeys(scores))
+
+    # PREDICT
+    predictions = np.array(
+        [x['predictions'] for x in est.predict(predict_input_fn)])
+    self.assertAllEqual((prediction_length, label_dimension), predictions.shape)
+
+    # EXPORT
+    feature_spec = tf.feature_column.make_parse_example_spec(feature_columns)
+    serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
+        feature_spec)
+    export_dir = est.export_saved_model(tempfile.mkdtemp(),
+                                        serving_input_receiver_fn)
+    self.assertTrue(tf.compat.v1.gfile.Exists(export_dir))
+
+  def test_numpy_input_fn(self):
+    """Tests complete flow with numpy_input_fn."""
+    label_dimension = 2
+    input_dimension = label_dimension
+    batch_size = 10
+    prediction_length = batch_size
+    data = np.linspace(0., 2., batch_size * label_dimension, dtype=np.float32)
+    data = data.reshape(batch_size, label_dimension)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=data,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    eval_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=data,
+        batch_size=batch_size,
+        num_epochs=1,
+        shuffle=False)
+    predict_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=None,
+        batch_size=batch_size,
+        num_epochs=1,
+        shuffle=False)
+
+    self._test_complete_flow(
+        train_input_fn=train_input_fn,
+        eval_input_fn=eval_input_fn,
+        predict_input_fn=predict_input_fn,
+        input_dimension=input_dimension,
+        label_dimension=label_dimension,
+        prediction_length=prediction_length)
+
+  def test_pandas_input_fn(self):
+    """Tests complete flow with pandas_input_fn."""
+    if not HAS_PANDAS:
+      return
+
+    # Pandas DataFrame naturally supports 1-dim data only.
+    label_dimension = 1
+    input_dimension = label_dimension
+    batch_size = 10
+    data = np.array([1., 2., 3., 4.], dtype=np.float32)
+    x = pd.DataFrame({'x': data})
+    y = pd.Series(data)
+    prediction_length = 4
+
+    train_input_fn = pandas_io.pandas_input_fn(
+        x=x, y=y, batch_size=batch_size, num_epochs=None, shuffle=True)
+    eval_input_fn = pandas_io.pandas_input_fn(
+        x=x, y=y, batch_size=batch_size, shuffle=False)
+    predict_input_fn = pandas_io.pandas_input_fn(
+        x=x, batch_size=batch_size, shuffle=False)
+
+    self._test_complete_flow(
+        train_input_fn=train_input_fn,
+        eval_input_fn=eval_input_fn,
+        predict_input_fn=predict_input_fn,
+        input_dimension=input_dimension,
+        label_dimension=label_dimension,
+        prediction_length=prediction_length)
+
+  def test_input_fn_from_parse_example(self):
+    """Tests complete flow with input_fn constructed from parse_example."""
+    label_dimension = 2
+    input_dimension = label_dimension
+    batch_size = 10
+    prediction_length = batch_size
+    data = np.linspace(0., 2., batch_size * label_dimension, dtype=np.float32)
+    data = data.reshape(batch_size, label_dimension)
+
+    serialized_examples = []
+    for datum in data:
+      example = example_pb2.Example(
+          features=feature_pb2.Features(
+              feature={
+                  'x':
+                      feature_pb2.Feature(
+                          float_list=feature_pb2.FloatList(value=datum)),
+                  'y':
+                      feature_pb2.Feature(
+                          float_list=feature_pb2.FloatList(
+                              value=datum[:label_dimension])),
+              }))
+      serialized_examples.append(example.SerializeToString())
+
+    feature_spec = {
+        'x': tf.io.FixedLenFeature([input_dimension], tf.dtypes.float32),
+        'y': tf.io.FixedLenFeature([label_dimension], tf.dtypes.float32),
+    }
+
+    def _train_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(serialized_examples,
+                                                  feature_spec)
+      features = queue_parsed_features(feature_map)
+      labels = features.pop('y')
+      return features, labels
+
+    def _eval_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(
+          tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
+          feature_spec)
+      features = queue_parsed_features(feature_map)
+      labels = features.pop('y')
+      return features, labels
+
+    def _predict_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(
+          tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
+          feature_spec)
+      features = queue_parsed_features(feature_map)
+      features.pop('y')
+      return features, None
+
+    self._test_complete_flow(
+        train_input_fn=_train_input_fn,
+        eval_input_fn=_eval_input_fn,
+        predict_input_fn=_predict_input_fn,
+        input_dimension=input_dimension,
+        label_dimension=label_dimension,
+        prediction_length=prediction_length)
+
+
+class BaseLinearRegressorTrainingTest(object):
+
+  def __init__(self, linear_regressor_fn, fc_lib=feature_column_v2):
+    self._linear_regressor_fn = linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def _assert_checkpoint(self,
+                         expected_global_step,
+                         expected_age_weight=None,
+                         expected_bias=None):
+    shapes = {
+        name: shape
+        for (name, shape) in tf.train.list_variables(self._model_dir)
+    }
+
+    self.assertEqual([], shapes[tf.compat.v1.GraphKeys.GLOBAL_STEP])
+    self.assertEqual(
+        expected_global_step,
+        tf.train.load_variable(self._model_dir,
+                               tf.compat.v1.GraphKeys.GLOBAL_STEP))
+
+    self.assertEqual([1, 1], shapes[AGE_WEIGHT_NAME])
+    if expected_age_weight is not None:
+      self.assertEqual(expected_age_weight,
+                       tf.train.load_variable(self._model_dir, AGE_WEIGHT_NAME))
+
+    self.assertEqual([1], shapes[BIAS_NAME])
+    if expected_bias is not None:
+      self.assertEqual(expected_bias,
+                       tf.train.load_variable(self._model_dir, BIAS_NAME))
+
+  def testFromScratchWithDefaultOptimizer(self):
+    # Create LinearRegressor.
+    label = 5.
+    age = 17
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir)
+
+    # Train for a few steps, and validate final checkpoint.
+    num_steps = 10
+    linear_regressor.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self._assert_checkpoint(num_steps)
+
+  def testTrainWithOneDimLabel(self):
+    label_dimension = 1
+    batch_size = 20
+    feature_columns = [self._fc_lib.numeric_column('age', shape=(1,))]
+    est = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        label_dimension=label_dimension,
+        model_dir=self._model_dir)
+    data_rank_1 = np.linspace(0., 2., batch_size, dtype=np.float32)
+    self.assertEqual((batch_size,), data_rank_1.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={'age': data_rank_1},
+        y=data_rank_1,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(200)
+
+  def testTrainWithOneDimWeight(self):
+    label_dimension = 1
+    batch_size = 20
+    feature_columns = [self._fc_lib.numeric_column('age', shape=(1,))]
+    est = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        label_dimension=label_dimension,
+        weight_column='w',
+        model_dir=self._model_dir)
+
+    data_rank_1 = np.linspace(0., 2., batch_size, dtype=np.float32)
+    self.assertEqual((batch_size,), data_rank_1.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={
+            'age': data_rank_1,
+            'w': data_rank_1
+        },
+        y=data_rank_1,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(200)
+
+  def testFromScratch(self):
+    # Create LinearRegressor.
+    label = 5.
+    age = 17
+    # loss = (logits - label)^2 = (0 - 5.)^2 = 25.
+    mock_opt = mock_optimizer(self, expected_loss=25.)
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir,
+        optimizer=mock_opt)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    linear_regressor.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self.assertEqual(
+        num_steps,
+        linear_regressor.get_variable_value(mock_opt.iterations.name))
+    self._assert_checkpoint(
+        expected_global_step=num_steps,
+        expected_age_weight=0.,
+        expected_bias=0.)
+
+  def testFromCheckpoint(self):
+    # Create initial checkpoint.
+    age_weight = 10.0
+    bias = 5.0
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable([[age_weight]], name=AGE_WEIGHT_NAME)
+      tf.Variable([bias], name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    # logits = age * age_weight + bias = 17 * 10. + 5. = 175
+    # loss = (logits - label)^2 = (175 - 5)^2 = 28900
+    mock_opt = mock_optimizer(self, expected_loss=28900.)
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir,
+        optimizer=mock_opt)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    linear_regressor.train(
+        input_fn=lambda: ({
+            'age': ((17,),)
+        }, ((5.,),)), steps=num_steps)
+    self.assertEqual(
+        initial_global_step + num_steps,
+        linear_regressor.get_variable_value(mock_opt.iterations.name))
+    self._assert_checkpoint(
+        expected_global_step=initial_global_step + num_steps,
+        expected_age_weight=age_weight,
+        expected_bias=bias)
+
+  def testFromCheckpointMultiBatch(self):
+    # Create initial checkpoint.
+    age_weight = 10.0
+    bias = 5.0
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable([[age_weight]], name=AGE_WEIGHT_NAME)
+      tf.Variable([bias], name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    # logits = age * age_weight + bias
+    # logits[0] = 17 * 10. + 5. = 175
+    # logits[1] = 15 * 10. + 5. = 155
+    # loss = sum((logits - label)^2) = (175 - 5)^2 + (155 - 3)^2 = 52004
+    # expected_loss = loss / 2 = 26002
+    mock_opt = mock_optimizer(self, expected_loss=26002.)
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir,
+        optimizer=mock_opt)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    linear_regressor.train(
+        input_fn=lambda: ({
+            'age': ((17,), (15,))
+        }, ((5.,), (3.,))),
+        steps=num_steps)
+    self.assertEqual(
+        initial_global_step + num_steps,
+        linear_regressor.get_variable_value(mock_opt.iterations.name))
+    self._assert_checkpoint(
+        expected_global_step=initial_global_step + num_steps,
+        expected_age_weight=age_weight,
+        expected_bias=bias)
+
+
+class BaseLinearClassifierTrainingTest(object):
+
+  def __init__(self, linear_classifier_fn, fc_lib=feature_column_v2):
+    self._linear_classifier_fn = linear_classifier_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      shutil.rmtree(self._model_dir)
+
+  def _assert_checkpoint(self,
+                         n_classes,
+                         expected_global_step,
+                         expected_age_weight=None,
+                         expected_bias=None):
+    logits_dimension = n_classes if n_classes > 2 else 1
+
+    shapes = {
+        name: shape
+        for (name, shape) in tf.train.list_variables(self._model_dir)
+    }
+
+    self.assertEqual([], shapes[tf.compat.v1.GraphKeys.GLOBAL_STEP])
+    self.assertEqual(
+        expected_global_step,
+        tf.train.load_variable(self._model_dir,
+                               tf.compat.v1.GraphKeys.GLOBAL_STEP))
+
+    self.assertEqual([1, logits_dimension], shapes[AGE_WEIGHT_NAME])
+    if expected_age_weight is not None:
+      self.assertAllEqual(
+          expected_age_weight,
+          tf.train.load_variable(self._model_dir, AGE_WEIGHT_NAME))
+
+    self.assertEqual([logits_dimension], shapes[BIAS_NAME])
+    if expected_bias is not None:
+      self.assertAllEqual(expected_bias,
+                          tf.train.load_variable(self._model_dir, BIAS_NAME))
+
+  def _testFromScratchWithDefaultOptimizer(self, n_classes):
+    label = 0
+    age = 17
+    est = linear.LinearClassifierV2(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, and validate final checkpoint.
+    num_steps = 10
+    est.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self._assert_checkpoint(n_classes, num_steps)
+
+  def testBinaryClassesFromScratchWithDefaultOptimizer(self):
+    self._testFromScratchWithDefaultOptimizer(n_classes=2)
+
+  def testMultiClassesFromScratchWithDefaultOptimizer(self):
+    self._testFromScratchWithDefaultOptimizer(n_classes=4)
+
+  def _testTrainWithTwoDimsLabel(self, n_classes):
+    batch_size = 20
+
+    est = linear.LinearClassifierV2(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    data_rank_1 = np.array([0, 1])
+    data_rank_2 = np.array([[0], [1]])
+    self.assertEqual((2,), data_rank_1.shape)
+    self.assertEqual((2, 1), data_rank_2.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={'age': data_rank_1},
+        y=data_rank_2,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(n_classes, 200)
+
+  def testBinaryClassesTrainWithTwoDimsLabel(self):
+    self._testTrainWithTwoDimsLabel(n_classes=2)
+
+  def testMultiClassesTrainWithTwoDimsLabel(self):
+    self._testTrainWithTwoDimsLabel(n_classes=4)
+
+  def _testTrainWithOneDimLabel(self, n_classes):
+    batch_size = 20
+
+    est = linear.LinearClassifierV2(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    data_rank_1 = np.array([0, 1])
+    self.assertEqual((2,), data_rank_1.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={'age': data_rank_1},
+        y=data_rank_1,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(n_classes, 200)
+
+  def testBinaryClassesTrainWithOneDimLabel(self):
+    self._testTrainWithOneDimLabel(n_classes=2)
+
+  def testMultiClassesTrainWithOneDimLabel(self):
+    self._testTrainWithOneDimLabel(n_classes=4)
+
+  def _testTrainWithTwoDimsWeight(self, n_classes):
+    batch_size = 20
+
+    est = linear.LinearClassifierV2(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        weight_column='w',
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    data_rank_1 = np.array([0, 1])
+    data_rank_2 = np.array([[0], [1]])
+    self.assertEqual((2,), data_rank_1.shape)
+    self.assertEqual((2, 1), data_rank_2.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={
+            'age': data_rank_1,
+            'w': data_rank_2
+        },
+        y=data_rank_1,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(n_classes, 200)
+
+  def testBinaryClassesTrainWithTwoDimsWeight(self):
+    self._testTrainWithTwoDimsWeight(n_classes=2)
+
+  def testMultiClassesTrainWithTwoDimsWeight(self):
+    self._testTrainWithTwoDimsWeight(n_classes=4)
+
+  def _testTrainWithOneDimWeight(self, n_classes):
+    batch_size = 20
+
+    est = linear.LinearClassifierV2(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        weight_column='w',
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    data_rank_1 = np.array([0, 1])
+    self.assertEqual((2,), data_rank_1.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={
+            'age': data_rank_1,
+            'w': data_rank_1
+        },
+        y=data_rank_1,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(n_classes, 200)
+
+  def testBinaryClassesTrainWithOneDimWeight(self):
+    self._testTrainWithOneDimWeight(n_classes=2)
+
+  def testMultiClassesTrainWithOneDimWeight(self):
+    self._testTrainWithOneDimWeight(n_classes=4)
+
+  def _testFromScratch(self, n_classes):
+    label = 1
+    age = 17
+    # For binary classifier:
+    #   loss = sigmoid_cross_entropy(logits, label) where logits=0 (weights are
+    #   all zero initially) and label = 1 so,
+    #      loss = 1 * -log ( sigmoid(logits) ) = 0.69315
+    # For multi class classifier:
+    #   loss = cross_entropy(logits, label) where logits are all 0s (weights are
+    #   all zero initially) and label = 1 so,
+    #      loss = 1 * -log ( 1.0 / n_classes )
+    # For this particular test case, since the logits are all equal, the
+    # formula 1 * -log ( 1.0 / n_classes ) covers both the binary and the
+    # multi class cases.
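+    # (Numerically, that is -log(1/2) ~= 0.69315 for the binary case and
+    # -log(1/4) ~= 1.38629 for n_classes=4.)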
+    mock_opt = mock_optimizer(
+        self, expected_loss=-1 * math.log(1.0 / n_classes))
+
+    est = linear.LinearClassifierV2(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        optimizer=mock_opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    est.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self.assertEqual(num_steps,
+                     est.get_variable_value(mock_opt.iterations.name))
+    self._assert_checkpoint(
+        n_classes,
+        expected_global_step=num_steps,
+        expected_age_weight=[[0.]] if n_classes == 2 else [[0.] * n_classes],
+        expected_bias=[0.] if n_classes == 2 else [0.] * n_classes)
+
+  def testBinaryClassesFromScratch(self):
+    self._testFromScratch(n_classes=2)
+
+  def testMultiClassesFromScratch(self):
+    self._testFromScratch(n_classes=4)
+
+  def _testFromCheckpoint(self, n_classes):
+    # Create initial checkpoint.
+    label = 1
+    age = 17
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as 2.0 * range(n_classes).
+    age_weight = [[2.0]] if n_classes == 2 else (np.reshape(
+        2.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [-35.0] if n_classes == 2 else [-35.0] * n_classes
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    # For binary classifier:
+    #   logits = age * age_weight + bias = 17 * 2. - 35. = -1.
+    #   loss = sigmoid_cross_entropy(logits, label)
+    #   so, loss = 1 * -log ( sigmoid(-1) ) = 1.3133
+    # For multi class classifier:
+    #   loss = cross_entropy(logits, label)
+    #   where logits = 17 * age_weight + bias and label = 1
+    #   so, loss = 1 * -log ( soft_max(logits)[1] )
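+    # (Numerically, sigmoid(-1) ~= 0.26894 and -log(0.26894) ~= 1.31326,
+    # which rounds to the 1.3133 binary-class constant below.)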
+    if n_classes == 2:
+      expected_loss = 1.3133
+    else:
+      logits = age_weight * age + bias
+      logits_exp = np.exp(logits)
+      softmax = logits_exp / logits_exp.sum()
+      expected_loss = -1 * math.log(softmax[0, label])
+
+    mock_opt = mock_optimizer(self, expected_loss=expected_loss)
+
+    est = linear.LinearClassifierV2(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        optimizer=mock_opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    est.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self.assertEqual(initial_global_step + num_steps,
+                     est.get_variable_value(mock_opt.iterations.name))
+    self._assert_checkpoint(
+        n_classes,
+        expected_global_step=initial_global_step + num_steps,
+        expected_age_weight=age_weight,
+        expected_bias=bias)
+
+  def testBinaryClassesFromCheckpoint(self):
+    self._testFromCheckpoint(n_classes=2)
+
+  def testMultiClassesFromCheckpoint(self):
+    self._testFromCheckpoint(n_classes=4)
+
+  def _testFromCheckpointFloatLabels(self, n_classes):
+    """Tests float labels for binary classification."""
+    # Create initial checkpoint.
+    if n_classes > 2:
+      return
+    label = 0.8
+    age = 17
+    age_weight = [[2.0]]
+    bias = [-35.0]
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    # logits = age * age_weight + bias = 17 * 2. - 35. = -1.
+    # loss = sigmoid_cross_entropy(logits, label)
+    # => loss = -0.8 * log(sigmoid(-1)) -0.2 * log(sigmoid(+1)) = 1.1132617
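+    # (Here -log(sigmoid(-1)) ~= 1.31326 and -log(sigmoid(1)) ~= 0.31326,
+    # so 0.8 * 1.31326 + 0.2 * 0.31326 ~= 1.11326.)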
+    mock_opt = mock_optimizer(self, expected_loss=1.1132617)
+
+    est = linear.LinearClassifierV2(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        optimizer=mock_opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    est.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self.assertEqual(initial_global_step + num_steps,
+                     est.get_variable_value(mock_opt.iterations.name))
+
+  def testBinaryClassesFromCheckpointFloatLabels(self):
+    self._testFromCheckpointFloatLabels(n_classes=2)
+
+  def testMultiClassesFromCheckpointFloatLabels(self):
+    self._testFromCheckpointFloatLabels(n_classes=4)
+
+  def _testFromCheckpointMultiBatch(self, n_classes):
+    # Create initial checkpoint.
+    label = [1, 0]
+    age = [17.0, 18.5]
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as 2.0 * range(n_classes).
+    age_weight = [[2.0]] if n_classes == 2 else (np.reshape(
+        2.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [-35.0] if n_classes == 2 else [-35.0] * n_classes
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    # For binary classifier:
+    #   logits = age * age_weight + bias
+    #   logits[0] = 17 * 2. - 35. = -1.
+    #   logits[1] = 18.5 * 2. - 35. = 2.
+    #   loss = sigmoid_cross_entropy(logits, label)
+    #   so, loss[0] = 1 * -log ( sigmoid(-1) ) = 1.3133
+    #       loss[1] = (1 - 0) * -log ( 1 - sigmoid(2) ) = 2.1269
+    #   expected_loss = (loss[0] + loss[1]) / batch size (2)
+    # For multi class classifier:
+    #   loss = cross_entropy(logits, label)
+    #   where logits = [17, 18.5] * age_weight + bias and label = [1, 0]
+    #   so, loss = 1 * -log ( soft_max(logits)[label] )
+    #   expected_loss = (loss[0] + loss[1]) / batch size (2)
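+    # (Numerically, -log(sigmoid(-1)) ~= 1.3133 and
+    # -log(1 - sigmoid(2)) = -log(0.11920) ~= 2.1269.)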
+    if n_classes == 2:
+      expected_loss = (1.3133 + 2.1269) / 2
+    else:
+      logits = age_weight * np.reshape(age, (2, 1)) + bias
+      logits_exp = np.exp(logits)
+      softmax_row_0 = logits_exp[0] / logits_exp[0].sum()
+      softmax_row_1 = logits_exp[1] / logits_exp[1].sum()
+      expected_loss_0 = -1 * math.log(softmax_row_0[label[0]])
+      expected_loss_1 = -1 * math.log(softmax_row_1[label[1]])
+      expected_loss = (expected_loss_0 + expected_loss_1) / 2
+
+    mock_opt = mock_optimizer(self, expected_loss=expected_loss)
+
+    est = linear.LinearClassifierV2(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        optimizer=mock_opt,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    est.train(input_fn=lambda: ({'age': (age)}, (label)), steps=num_steps)
+    self.assertEqual(initial_global_step + num_steps,
+                     est.get_variable_value(mock_opt.iterations.name))
+    self._assert_checkpoint(
+        n_classes,
+        expected_global_step=initial_global_step + num_steps,
+        expected_age_weight=age_weight,
+        expected_bias=bias)
+
+  def testBinaryClassesFromCheckpointMultiBatch(self):
+    self._testFromCheckpointMultiBatch(n_classes=2)
+
+  def testMultiClassesFromCheckpointMultiBatch(self):
+    self._testFromCheckpointMultiBatch(n_classes=4)
+
+
+class BaseLinearClassifierEvaluationTest(object):
+
+  def __init__(self, linear_classifier_fn, fc_lib=feature_column_v2):
+    self._linear_classifier_fn = linear_classifier_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      shutil.rmtree(self._model_dir)
+
+  def _test_evaluation_for_simple_data(self, n_classes):
+    label = 1
+    age = 1.
+
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as 2.0 * range(n_classes).
+    age_weight = [[-11.0]] if n_classes == 2 else (np.reshape(
+        -11.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [-30.0] if n_classes == 2 else [-30.0] * n_classes
+
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    est = self._linear_classifier_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    eval_metrics = est.evaluate(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=1)
+
+    if n_classes == 2:
+      # Binary classes: loss = sigmoid_cross_entropy(logits=-41, label=1) ~= 41.
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: 41.,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.LOSS_MEAN: 41.,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+          metric_keys.MetricKeys.PRECISION: 0.,
+          metric_keys.MetricKeys.RECALL: 0.,
+          metric_keys.MetricKeys.PREDICTION_MEAN: 0.,
+          metric_keys.MetricKeys.LABEL_MEAN: 1.,
+          metric_keys.MetricKeys.ACCURACY_BASELINE: 1.,
+          metric_keys.MetricKeys.AUC: 0.,
+          metric_keys.MetricKeys.AUC_PR: 1.,
+      }
+    else:
+      # Multi classes: loss = 1 * -log ( soft_max(logits)[label] )
+      logits = age_weight * age + bias
+      logits_exp = np.exp(logits)
+      softmax = logits_exp / logits_exp.sum()
+      expected_loss = -1 * math.log(softmax[0, label])
+
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: expected_loss,
+          metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+      }
+
+    self.assertAllClose(
+        sorted_key_dict(expected_metrics),
+        sorted_key_dict(eval_metrics),
+        rtol=1e-3)
+
+  def test_binary_classes_evaluation_for_simple_data(self):
+    self._test_evaluation_for_simple_data(n_classes=2)
+
+  def test_multi_classes_evaluation_for_simple_data(self):
+    self._test_evaluation_for_simple_data(n_classes=4)
+
+  def _test_evaluation_batch(self, n_classes):
+    """Tests evaluation for batch_size==2."""
+    label = [1, 0]
+    age = [17., 18.]
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as 2.0 * range(n_classes).
+    age_weight = [[2.0]] if n_classes == 2 else (np.reshape(
+        2.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [-35.0] if n_classes == 2 else [-35.0] * n_classes
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    est = self._linear_classifier_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    eval_metrics = est.evaluate(
+        input_fn=lambda: ({
+            'age': (age)
+        }, (label)), steps=1)
+
+    if n_classes == 2:
+      # Logits are (-1., 1.) labels are (1, 0).
+      # Loss is
+      #   loss for row 1: 1 * -log(sigmoid(-1)) = 1.3133
+      #   loss for row 2: (1 - 0) * -log(1 - sigmoid(1)) = 1.3133
+      expected_loss = (1.3133 * 2) / 2  # Divided by batch size
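+      # (Both rows contribute the same value because
+      # -log(sigmoid(-1)) == -log(1 - sigmoid(1)) ~= 1.3133.)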
+
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: expected_loss,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+          metric_keys.MetricKeys.PRECISION: 0.,
+          metric_keys.MetricKeys.RECALL: 0.,
+          metric_keys.MetricKeys.PREDICTION_MEAN: 0.5,
+          metric_keys.MetricKeys.LABEL_MEAN: 0.5,
+          metric_keys.MetricKeys.ACCURACY_BASELINE: 0.5,
+          metric_keys.MetricKeys.AUC: 0.,
+          metric_keys.MetricKeys.AUC_PR: 0.3068,
+      }
+    else:
+      # Multi classes: loss = 1 * -log ( soft_max(logits)[label] )
+      logits = age_weight * np.reshape(age, (2, 1)) + bias
+      logits_exp = np.exp(logits)
+      softmax_row_0 = logits_exp[0] / logits_exp[0].sum()
+      softmax_row_1 = logits_exp[1] / logits_exp[1].sum()
+      expected_loss_0 = -1 * math.log(softmax_row_0[label[0]])
+      expected_loss_1 = -1 * math.log(softmax_row_1[label[1]])
+      expected_loss = (expected_loss_0 + expected_loss_1) / 2  # batch size
+
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: expected_loss,
+          metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+      }
+
+    self.assertAllClose(
+        sorted_key_dict(expected_metrics),
+        sorted_key_dict(eval_metrics),
+        rtol=1e-3)
+
+  def test_binary_classes_evaluation_batch(self):
+    self._test_evaluation_batch(n_classes=2)
+
+  def test_multi_classes_evaluation_batch(self):
+    self._test_evaluation_batch(n_classes=4)
+
+  def _test_evaluation_weights(self, n_classes):
+    """Tests evaluation with weights."""
+
+    label = [1, 0]
+    age = [17., 18.]
+    weights = [1., 2.]
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as 2.0 * range(n_classes).
+    age_weight = [[2.0]] if n_classes == 2 else (np.reshape(
+        2.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [-35.0] if n_classes == 2 else [-35.0] * n_classes
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    est = self._linear_classifier_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        weight_column='w',
+        model_dir=self._model_dir)
+    eval_metrics = est.evaluate(
+        input_fn=lambda: ({
+            'age': (age),
+            'w': (weights)
+        }, (label)), steps=1)
+
+    if n_classes == 2:
+      # Logits are (-1., 1.) labels are (1, 0).
+      # Loss is
+      #   loss for row 1: 1 * -log(sigmoid(-1)) = 1.3133
+      #   loss for row 2: (1 - 0) * -log(1 - sigmoid(1)) = 1.3133
+      #   weights = [1., 2.]
+      expected_loss = (1.3133 * (1. + 2.)) / 2  # Divided by batch size
+      loss_mean = (1.3133 * (1. + 2.)) / (1.0 + 2.0)
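+      # (LOSS divides the weighted loss sum by the batch size, 2, while
+      # LOSS_MEAN divides by the sum of the weights, 3.0.)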
+      label_mean = np.average(label, weights=weights)
+      logits = [-1, 1]
+      logistics = sigmoid(np.array(logits))
+      predictions_mean = np.average(logistics, weights=weights)
+
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: expected_loss,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.LOSS_MEAN: loss_mean,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+          metric_keys.MetricKeys.PRECISION: 0.,
+          metric_keys.MetricKeys.RECALL: 0.,
+          metric_keys.MetricKeys.PREDICTION_MEAN: predictions_mean,
+          metric_keys.MetricKeys.LABEL_MEAN: label_mean,
+          metric_keys.MetricKeys.ACCURACY_BASELINE:
+              (max(label_mean, 1 - label_mean)),
+          metric_keys.MetricKeys.AUC: 0.,
+          metric_keys.MetricKeys.AUC_PR: 0.1891,
+      }
+    else:
+      # Multi classes: unweighted_loss = 1 * -log ( soft_max(logits)[label] )
+      logits = age_weight * np.reshape(age, (2, 1)) + bias
+      logits_exp = np.exp(logits)
+      softmax_row_0 = logits_exp[0] / logits_exp[0].sum()
+      softmax_row_1 = logits_exp[1] / logits_exp[1].sum()
+      expected_loss_0 = -1 * math.log(softmax_row_0[label[0]])
+      expected_loss_1 = -1 * math.log(softmax_row_1[label[1]])
+      loss_mean = np.average([expected_loss_0, expected_loss_1],
+                             weights=weights)
+      expected_loss = (loss_mean * np.sum(weights)) / 2  # batch size
+
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: expected_loss,
+          metric_keys.MetricKeys.LOSS_MEAN: loss_mean,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+      }
+
+    self.assertAllClose(
+        sorted_key_dict(expected_metrics),
+        sorted_key_dict(eval_metrics),
+        rtol=1e-3)
+
+  def test_binary_classes_evaluation_weights(self):
+    self._test_evaluation_weights(n_classes=2)
+
+  def test_multi_classes_evaluation_weights(self):
+    self._test_evaluation_weights(n_classes=4)
+
+
+class BaseLinearClassifierPredictTest(object):
+
+  def __init__(self, linear_classifier_fn, fc_lib=feature_column_v2):
+    self._linear_classifier_fn = linear_classifier_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      shutil.rmtree(self._model_dir)
+
+  def _testPredictions(self, n_classes, label_vocabulary, label_output_fn):
+    """Tests predict when all variables are one-dimensional."""
+    age = 1.
+
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as 2.0 * range(n_classes).
+    age_weight = [[-11.0]] if n_classes == 2 else (np.reshape(
+        -11.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [10.0] if n_classes == 2 else [10.0] * n_classes
+
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    est = self._linear_classifier_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        label_vocabulary=label_vocabulary,
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+
+    predict_input_fn = numpy_io.numpy_input_fn(
+        x={'age': np.array([[age]])},
+        y=None,
+        batch_size=1,
+        num_epochs=1,
+        shuffle=False)
+    predictions = list(est.predict(input_fn=predict_input_fn))
+
+    if n_classes == 2:
+      scalar_logits = np.reshape(np.array(age_weight) * age + bias,
+                                 (1,)).item()
+      two_classes_logits = [0, scalar_logits]
+      two_classes_logits_exp = np.exp(two_classes_logits)
+      softmax = two_classes_logits_exp / two_classes_logits_exp.sum()
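+      # (softmax over [0, z] equals [1 - sigmoid(z), sigmoid(z)], matching
+      # the 'logistic' entry below.)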
+
+      expected_predictions = {
+          'class_ids': [0],
+          'all_class_ids': [0, 1],
+          'classes': [label_output_fn(0)],
+          'all_classes': [label_output_fn(0),
+                          label_output_fn(1)],
+          'logistic': [sigmoid(np.array(scalar_logits))],
+          'logits': [scalar_logits],
+          'probabilities': softmax,
+      }
+    else:
+      onedim_logits = np.reshape(np.array(age_weight) * age + bias, (-1,))
+      class_ids = onedim_logits.argmax()
+      all_class_ids = list(range(len(onedim_logits)))
+      logits_exp = np.exp(onedim_logits)
+      softmax = logits_exp / logits_exp.sum()
+      expected_predictions = {
+          'class_ids': [class_ids],
+          'all_class_ids': all_class_ids,
+          'classes': [label_output_fn(class_ids)],
+          'all_classes': [label_output_fn(i) for i in all_class_ids],
+          'logits': onedim_logits,
+          'probabilities': softmax,
+      }
+
+    self.assertEqual(1, len(predictions))
+    # assertAllClose cannot handle byte type.
+    self.assertEqual(expected_predictions['classes'], predictions[0]['classes'])
+    expected_predictions.pop('classes')
+    predictions[0].pop('classes')
+    self.assertAllEqual(expected_predictions['all_classes'],
+                        predictions[0]['all_classes'])
+    expected_predictions.pop('all_classes')
+    predictions[0].pop('all_classes')
+    self.assertAllClose(
+        sorted_key_dict(expected_predictions), sorted_key_dict(predictions[0]))
+
+  def testBinaryClassesWithoutLabelVocabulary(self):
+    n_classes = 2
+    self._testPredictions(
+        n_classes,
+        label_vocabulary=None,
+        label_output_fn=lambda x: ('%s' % x).encode())
+
+  def testBinaryClassesWithLabelVocabulary(self):
+    n_classes = 2
+    self._testPredictions(
+        n_classes,
+        label_vocabulary=['class_vocab_{}'.format(i) for i in range(n_classes)],
+        label_output_fn=lambda x: ('class_vocab_%s' % x).encode())
+
+  def testMultiClassesWithoutLabelVocabulary(self):
+    n_classes = 4
+    self._testPredictions(
+        n_classes,
+        label_vocabulary=None,
+        label_output_fn=lambda x: ('%s' % x).encode())
+
+  def testMultiClassesWithLabelVocabulary(self):
+    n_classes = 4
+    self._testPredictions(
+        n_classes,
+        label_vocabulary=['class_vocab_{}'.format(i) for i in range(n_classes)],
+        label_output_fn=lambda x: ('class_vocab_%s' % x).encode())
+
+  def testSparseCombiner(self):
+    w_a = 2.0
+    w_b = 3.0
+    w_c = 5.0
+    bias = 5.0
+    with tf.Graph().as_default():
+      tf.Variable([[w_a], [w_b], [w_c]], name=LANGUAGE_WEIGHT_NAME)
+      tf.Variable([bias], name=BIAS_NAME)
+      tf.Variable(
+          1, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    def _input_fn():
+      return tf.compat.v1.data.Dataset.from_tensors({
+          'language':
+              tf.sparse.SparseTensor(
+                  values=['a', 'c', 'b', 'c'],
+                  indices=[[0, 0], [0, 1], [1, 0], [1, 1]],
+                  dense_shape=[2, 2]),
+      })
+
+    feature_columns = (self._fc_lib.categorical_column_with_vocabulary_list(
+        'language', vocabulary_list=['a', 'b', 'c']),)
+
+    # Check prediction for each sparse_combiner.
+    # With sparse_combiner = 'sum', we have
+    # logits_1 = w_a + w_c + bias
+    #          = 2.0 + 5.0 + 5.0 = 12.0
+    # logits_2 = w_b + w_c + bias
+    #          = 3.0 + 5.0 + 5.0 = 13.0
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=feature_columns, model_dir=self._model_dir)
+    predictions = linear_classifier.predict(input_fn=_input_fn)
+    predicted_scores = [x['logits'] for x in predictions]
+    self.assertAllClose([[12.0], [13.0]], predicted_scores)
+
+    # With sparse_combiner = 'mean', we have
+    # logits_1 = 1/2 * (w_a + w_c) + bias
+    #          = 1/2 * (2.0 + 5.0) + 5.0 = 8.5
+    # logits_2 = 1/2 * (w_b + w_c) + bias
+    #          = 1/2 * (3.0 + 5.0) + 5.0 = 9.0
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=feature_columns,
+        model_dir=self._model_dir,
+        sparse_combiner='mean')
+    predictions = linear_classifier.predict(input_fn=_input_fn)
+    predicted_scores = [x['logits'] for x in predictions]
+    self.assertAllClose([[8.5], [9.0]], predicted_scores)
+
+    # With sparse_combiner = 'sqrtn', we have
+    # logits_1 = sqrt(2)/2 * (w_a + w_c) + bias
+    #          = sqrt(2)/2 * (2.0 + 5.0) + 5.0 = 9.94974
+    # logits_2 = sqrt(2)/2 * (w_b + w_c) + bias
+    #          = sqrt(2)/2 * (3.0 + 5.0) + 5.0 = 10.65685
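+    # (Same combiner arithmetic as in BaseLinearRegressorPredictTest above;
+    # here the raw 'logits' are checked instead of 'predictions'.)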
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=feature_columns,
+        model_dir=self._model_dir,
+        sparse_combiner='sqrtn')
+    predictions = linear_classifier.predict(input_fn=_input_fn)
+    predicted_scores = [x['logits'] for x in predictions]
+    self.assertAllClose([[9.94974], [10.65685]], predicted_scores)
+
+
+class BaseLinearClassifierIntegrationTest(object):
+
+  def __init__(self, linear_classifier_fn, fc_lib=feature_column_v2):
+    self._linear_classifier_fn = linear_classifier_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      shutil.rmtree(self._model_dir)
+
+  def _test_complete_flow(self, n_classes, train_input_fn, eval_input_fn,
+                          predict_input_fn, input_dimension, prediction_length):
+    feature_columns = [
+        self._fc_lib.numeric_column('x', shape=(input_dimension,))
+    ]
+    est = self._linear_classifier_fn(
+        feature_columns=feature_columns,
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+
+    # TRAIN
+    # learn y = x
+    est.train(train_input_fn, steps=200)
+
+    # EVALUATE
+    scores = est.evaluate(eval_input_fn)
+    self.assertEqual(200, scores[tf.compat.v1.GraphKeys.GLOBAL_STEP])
+    self.assertIn(metric_keys.MetricKeys.LOSS, six.iterkeys(scores))
+
+    # PREDICT
+    predictions = np.array(
+        [x['classes'] for x in est.predict(predict_input_fn)])
+    self.assertAllEqual((prediction_length, 1), predictions.shape)
+
+    # EXPORT
+    feature_spec = tf.feature_column.make_parse_example_spec(feature_columns)
+    serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
+        feature_spec)
+    export_dir = est.export_saved_model(tempfile.mkdtemp(),
+                                        serving_input_receiver_fn)
+    self.assertTrue(tf.compat.v1.gfile.Exists(export_dir))
+
+  def _test_numpy_input_fn(self, n_classes):
+    """Tests complete flow with numpy_input_fn."""
+    input_dimension = 4
+    batch_size = 10
+    prediction_length = batch_size
+    data = np.linspace(0., 2., batch_size * input_dimension, dtype=np.float32)
+    data = data.reshape(batch_size, input_dimension)
+    target = np.array([1] * batch_size)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=target,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    eval_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=target,
+        batch_size=batch_size,
+        num_epochs=1,
+        shuffle=False)
+    predict_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=None,
+        batch_size=batch_size,
+        num_epochs=1,
+        shuffle=False)
+
+    self._test_complete_flow(
+        n_classes=n_classes,
+        train_input_fn=train_input_fn,
+        eval_input_fn=eval_input_fn,
+        predict_input_fn=predict_input_fn,
+        input_dimension=input_dimension,
+        prediction_length=prediction_length)
+
+  def test_binary_classes_numpy_input_fn(self):
+    self._test_numpy_input_fn(n_classes=2)
+
+  def test_multi_classes_numpy_input_fn(self):
+    self._test_numpy_input_fn(n_classes=4)
+
+  def _test_pandas_input_fn(self, n_classes):
+    """Tests complete flow with pandas_input_fn."""
+    if not HAS_PANDAS:
+      return
+
+    # Pandas DataFrame naturally supports 1-dim data only.
+    input_dimension = 1
+    batch_size = 10
+    data = np.array([1., 2., 3., 4.], dtype=np.float32)
+    target = np.array([1, 0, 1, 0], dtype=np.int32)
+    x = pd.DataFrame({'x': data})
+    y = pd.Series(target)
+    prediction_length = 4
+
+    train_input_fn = pandas_io.pandas_input_fn(
+        x=x, y=y, batch_size=batch_size, num_epochs=None, shuffle=True)
+    eval_input_fn = pandas_io.pandas_input_fn(
+        x=x, y=y, batch_size=batch_size, shuffle=False)
+    predict_input_fn = pandas_io.pandas_input_fn(
+        x=x, batch_size=batch_size, shuffle=False)
+
+    self._test_complete_flow(
+        n_classes=n_classes,
+        train_input_fn=train_input_fn,
+        eval_input_fn=eval_input_fn,
+        predict_input_fn=predict_input_fn,
+        input_dimension=input_dimension,
+        prediction_length=prediction_length)
+
+  def test_binary_classes_pandas_input_fn(self):
+    self._test_pandas_input_fn(n_classes=2)
+
+  def test_multi_classes_pandas_input_fn(self):
+    self._test_pandas_input_fn(n_classes=4)
+
+  def _test_input_fn_from_parse_example(self, n_classes):
+    """Tests complete flow with input_fn constructed from parse_example."""
+    input_dimension = 2
+    batch_size = 10
+    prediction_length = batch_size
+    data = np.linspace(0., 2., batch_size * input_dimension, dtype=np.float32)
+    data = data.reshape(batch_size, input_dimension)
+    target = np.array([1] * batch_size, dtype=np.int64)
+
+    serialized_examples = []
+    for x, y in zip(data, target):
+      example = example_pb2.Example(
+          features=feature_pb2.Features(
+              feature={
+                  'x':
+                      feature_pb2.Feature(
+                          float_list=feature_pb2.FloatList(value=x)),
+                  'y':
+                      feature_pb2.Feature(
+                          int64_list=feature_pb2.Int64List(value=[y])),
+              }))
+      serialized_examples.append(example.SerializeToString())
+
+    feature_spec = {
+        'x': tf.io.FixedLenFeature([input_dimension], tf.dtypes.float32),
+        'y': tf.io.FixedLenFeature([1], tf.dtypes.int64),
+    }
+
+    def _train_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(serialized_examples,
+                                                  feature_spec)
+      features = queue_parsed_features(feature_map)
+      labels = features.pop('y')
+      return features, labels
+
+    def _eval_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(
+          tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
+          feature_spec)
+      features = queue_parsed_features(feature_map)
+      labels = features.pop('y')
+      return features, labels
+
+    def _predict_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(
+          tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
+          feature_spec)
+      features = queue_parsed_features(feature_map)
+      features.pop('y')
+      return features, None
+
+    self._test_complete_flow(
+        n_classes=n_classes,
+        train_input_fn=_train_input_fn,
+        eval_input_fn=_eval_input_fn,
+        predict_input_fn=_predict_input_fn,
+        input_dimension=input_dimension,
+        prediction_length=prediction_length)
+
+  def test_binary_classes_input_fn_from_parse_example(self):
+    self._test_input_fn_from_parse_example(n_classes=2)
+
+  def test_multi_classes_input_fn_from_parse_example(self):
+    self._test_input_fn_from_parse_example(n_classes=4)
+
+
+class BaseLinearLogitFnTest(object):
+
+  def __init__(self, fc_lib=feature_column_v2):
+    self._fc_lib = fc_lib
+
+  def test_basic_logit_correctness(self):
+    """linear_logit_fn simply wraps feature_column_lib.linear_model."""
+    age = self._fc_lib.numeric_column('age')
+    with tf.Graph().as_default():
+      logit_fn = linear.linear_logit_fn_builder(units=2, feature_columns=[age])
+      logits = logit_fn(features={'age': [[23.], [31.]]})
+      bias_var = tf.compat.v1.get_collection(
+          tf.compat.v1.GraphKeys.GLOBAL_VARIABLES,
+          'linear_model/bias_weights')[0]
+      age_var = tf.compat.v1.get_collection(
+          tf.compat.v1.GraphKeys.GLOBAL_VARIABLES, 'linear_model/age')[0]
+      with tf.compat.v1.Session() as sess:
+        sess.run([tf.compat.v1.initializers.global_variables()])
+        self.assertAllClose([[0., 0.], [0., 0.]], logits.eval())
+        sess.run(bias_var.assign([10., 5.]))
+        self.assertAllClose([[10., 5.], [10., 5.]], logits.eval())
+        sess.run(age_var.assign([[2.0, 3.0]]))
+        # [2 * 23 + 10, 3 * 23 + 5] = [56, 74].
+        # [2 * 31 + 10, 3 * 31 + 5] = [72, 98]
+        self.assertAllClose([[56., 74.], [72., 98.]], logits.eval())
+
+  def test_compute_fraction_of_zero_v2(self):
+    """Tests the calculation of sparsity."""
+    if self._fc_lib != feature_column_v2:
+      return
+
+    age = tf.feature_column.numeric_column('age')
+    occupation = tf.feature_column.categorical_column_with_hash_bucket(
+        'occupation', hash_bucket_size=5)
+    with tf.Graph().as_default():
+      model = linear.LinearModel(
+          feature_columns=[age, occupation], units=3, name='linear_model')
+      features = {
+          'age': [[23.], [31.]],
+          'occupation': [['doctor'], ['engineer']]
+      }
+      model(features)
+      variables = model.variables
+      variables.remove(model.bias)
+      fraction_zero = linear._compute_fraction_of_zero(variables)
+      age_var = tf.compat.v1.get_collection(
+          tf.compat.v1.GraphKeys.GLOBAL_VARIABLES, 'linear_model/age')[0]
+      with tf.compat.v1.Session() as sess:
+        sess.run([tf.compat.v1.initializers.global_variables()])
+        # Upon initialization, all variables will be zero.
+        self.assertAllClose(1, fraction_zero.eval())
+
+        sess.run(age_var.assign([[2.0, 0.0, -1.0]]))
+        # 1 of the 3 age weights is zero, and all 15 occupation weights (5
+        # hash buckets x 3-dim output) are zero, so 16 of the 18 weights are
+        # zero.
+        self.assertAllClose(16. / 18., fraction_zero.eval())
+
+
+class BaseLinearWarmStartingTest(object):
+
+  def __init__(self,
+               _linear_classifier_fn,
+               _linear_regressor_fn,
+               fc_lib=feature_column_v2):
+    self._linear_classifier_fn = _linear_classifier_fn
+    self._linear_regressor_fn = _linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    # Create a directory to save our old checkpoint and vocabularies to.
+    self._ckpt_and_vocab_dir = tempfile.mkdtemp()
+
+    # Make a dummy input_fn.
+    def _input_fn():
+      features = {
+          'age': [[23.], [31.]],
+          'age_in_years': [[23.], [31.]],
+          'occupation': [['doctor'], ['consultant']]
+      }
+      return features, [0, 1]
+
+    self._input_fn = _input_fn
+
+  def tearDown(self):
+    # Clean up checkpoint / vocab dir.
+    tf.compat.v1.summary.FileWriterCache.clear()
+    shutil.rmtree(self._ckpt_and_vocab_dir)
+
+  def test_classifier_basic_warm_starting(self):
+    """Tests correctness of LinearClassifier default warm-start."""
+    age = self._fc_lib.numeric_column('age')
+
+    # Create a LinearClassifier and train to save a checkpoint.
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=[age],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second LinearClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_linear_classifier = self._linear_classifier_fn(
+        feature_columns=[age],
+        n_classes=4,
+        optimizer=gradient_descent.SGD(learning_rate=0.0),
+        warm_start_from=linear_classifier.model_dir)
+
+    warm_started_linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_linear_classifier.get_variable_names():
+      # The learning rate is also checkpointed in the V2 optimizer, so we need
+      # to make sure it uses the new value after warm-starting.
+      if 'learning_rate' in variable_name:
+        self.assertAllClose(
+            0.0,
+            warm_started_linear_classifier.get_variable_value(variable_name))
+      else:
+        self.assertAllClose(
+            linear_classifier.get_variable_value(variable_name),
+            warm_started_linear_classifier.get_variable_value(variable_name))
+
+  def test_regressor_basic_warm_starting(self):
+    """Tests correctness of LinearRegressor default warm-start."""
+    age = self._fc_lib.numeric_column('age')
+
+    # Create a LinearRegressor and train to save a checkpoint.
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=[age],
+        model_dir=self._ckpt_and_vocab_dir,
+        optimizer='SGD')
+    linear_regressor.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second LinearRegressor, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_linear_regressor = self._linear_regressor_fn(
+        feature_columns=[age],
+        optimizer=gradient_descent.SGD(learning_rate=0.0),
+        warm_start_from=linear_regressor.model_dir)
+
+    warm_started_linear_regressor.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_linear_regressor.get_variable_names():
+      # The learning rate is also checkpointed in the V2 optimizer, so we need
+      # to make sure it uses the new value after warm-starting.
+      if 'learning_rate' in variable_name:
+        self.assertAllClose(
+            0.0,
+            warm_started_linear_regressor.get_variable_value(variable_name))
+      else:
+        self.assertAllClose(
+            linear_regressor.get_variable_value(variable_name),
+            warm_started_linear_regressor.get_variable_value(variable_name))
+
+  def test_warm_starting_selective_variables(self):
+    """Tests selecting variables to warm-start."""
+    age = self._fc_lib.numeric_column('age')
+
+    # Create a LinearClassifier and train to save a checkpoint.
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=[age],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second LinearClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_linear_classifier = self._linear_classifier_fn(
+        feature_columns=[age],
+        n_classes=4,
+        optimizer=gradient_descent.SGD(learning_rate=0.0),
+        # The provided regular expression will only warm-start the age variable
+        # and not the bias.
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=linear_classifier.model_dir,
+            vars_to_warm_start='.*(age).*'))
+
+    warm_started_linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+    self.assertAllClose(
+        linear_classifier.get_variable_value(AGE_WEIGHT_NAME),
+        warm_started_linear_classifier.get_variable_value(AGE_WEIGHT_NAME))
+    # Bias should still be zero from initialization.
+    self.assertAllClose(
+        [0.0] * 4, warm_started_linear_classifier.get_variable_value(BIAS_NAME))
+
+  def test_warm_starting_with_vocab_remapping_and_partitioning(self):
+    """Tests warm-starting with vocab remapping and partitioning."""
+    vocab_list = ['doctor', 'lawyer', 'consultant']
+    vocab_file = os.path.join(self._ckpt_and_vocab_dir, 'occupation_vocab')
+    with open(vocab_file, 'w') as f:
+      f.write('\n'.join(vocab_list))
+    occupation = self._fc_lib.categorical_column_with_vocabulary_file(
+        'occupation',
+        vocabulary_file=vocab_file,
+        vocabulary_size=len(vocab_list))
+
+    # Create a LinearClassifier and train to save a checkpoint.
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=[occupation],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second LinearClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).  Use a new FeatureColumn with a
+    # different vocabulary for occupation.
+    new_vocab_list = ['doctor', 'consultant', 'engineer']
+    new_vocab_file = os.path.join(self._ckpt_and_vocab_dir,
+                                  'new_occupation_vocab')
+    with open(new_vocab_file, 'w') as f:
+      f.write('\n'.join(new_vocab_list))
+    new_occupation = self._fc_lib.categorical_column_with_vocabulary_file(
+        'occupation',
+        vocabulary_file=new_vocab_file,
+        vocabulary_size=len(new_vocab_list))
+    # We can create our VocabInfo object from the new and old occupation
+    # FeatureColumns.
+    occupation_vocab_info = estimator.VocabInfo(
+        new_vocab=new_occupation.vocabulary_file,
+        new_vocab_size=new_occupation.vocabulary_size,
+        num_oov_buckets=new_occupation.num_oov_buckets,
+        old_vocab=occupation.vocabulary_file,
+        old_vocab_size=occupation.vocabulary_size,
+        # Can't use constant_initializer with load_and_remap.  In practice,
+        # use a truncated normal initializer; here a degenerate
+        # random_uniform(0.39, 0.39) stands in for a constant.
+        backup_initializer=tf.compat.v1.initializers.random_uniform(
+            minval=0.39, maxval=0.39))
+    warm_started_linear_classifier = self._linear_classifier_fn(
+        feature_columns=[occupation],
+        n_classes=4,
+        optimizer=gradient_descent.SGD(learning_rate=0.0),
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=linear_classifier.model_dir,
+            var_name_to_vocab_info={
+                OCCUPATION_WEIGHT_NAME: occupation_vocab_info
+            },
+            # Explicitly providing None here will only warm-start variables
+            # referenced in var_name_to_vocab_info (the bias will not be
+            # warm-started).
+            vars_to_warm_start=None))
+
+    warm_started_linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+    # 'doctor' was ID-0 and is still ID-0.
+    self.assertAllClose(
+        linear_classifier.get_variable_value(OCCUPATION_WEIGHT_NAME)[0, :],
+        warm_started_linear_classifier.get_variable_value(
+            OCCUPATION_WEIGHT_NAME)[0, :])
+    # 'consultant' was ID-2 and is now ID-1.
+    self.assertAllClose(
+        linear_classifier.get_variable_value(OCCUPATION_WEIGHT_NAME)[2, :],
+        warm_started_linear_classifier.get_variable_value(
+            OCCUPATION_WEIGHT_NAME)[1, :])
+    # 'engineer' is a new entry and should be initialized with the
+    # backup_initializer in VocabInfo.
+    self.assertAllClose([0.39] * 4,
+                        warm_started_linear_classifier.get_variable_value(
+                            OCCUPATION_WEIGHT_NAME)[2, :])
+    # Bias should still be zero (from initialization logic).
+    self.assertAllClose(
+        [0.0] * 4, warm_started_linear_classifier.get_variable_value(BIAS_NAME))
+
+  def test_warm_starting_with_naming_change(self):
+    """Tests warm-starting with a Tensor name remapping."""
+    age_in_years = self._fc_lib.numeric_column('age_in_years')
+
+    # Create a LinearClassifier and train to save a checkpoint.
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=[age_in_years],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second LinearClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_linear_classifier = self._linear_classifier_fn(
+        feature_columns=[self._fc_lib.numeric_column('age')],
+        n_classes=4,
+        optimizer=gradient_descent.SGD(learning_rate=0.0),
+        # The 'age' variable corresponds to the 'age_in_years' variable in the
+        # previous model.
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=linear_classifier.model_dir,
+            var_name_to_prev_var_name={
+                AGE_WEIGHT_NAME: AGE_WEIGHT_NAME.replace('age', 'age_in_years')
+            }))
+
+    warm_started_linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+    self.assertAllClose(
+        linear_classifier.get_variable_value(
+            AGE_WEIGHT_NAME.replace('age', 'age_in_years')),
+        warm_started_linear_classifier.get_variable_value(AGE_WEIGHT_NAME))
+    # The bias is also warm-started (with no name remapping).
+    self.assertAllClose(
+        linear_classifier.get_variable_value(BIAS_NAME),
+        warm_started_linear_classifier.get_variable_value(BIAS_NAME))
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/metric_keys.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/metric_keys.py
new file mode 100644
index 00000000..2974306f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/metric_keys.py
@@ -0,0 +1,61 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Enum for model prediction keys."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from tensorflow_estimator.python.estimator import model_fn
+
+
+class MetricKeys(object):
+  """Metric key strings."""
+  LOSS = model_fn.LOSS_METRIC_KEY
+  LOSS_MEAN = model_fn.AVERAGE_LOSS_METRIC_KEY
+  LOSS_REGULARIZATION = 'regularization_loss'
+
+  ACCURACY = 'accuracy'
+  PRECISION = 'precision'
+  RECALL = 'recall'
+  # This is the best the model could do by always predicting one class.
+  # Should be < ACCURACY in a trained model.
+  ACCURACY_BASELINE = 'accuracy_baseline'
+  AUC = 'auc'
+  AUC_PR = 'auc_precision_recall'
+  LABEL_MEAN = 'label/mean'
+  PREDICTION_MEAN = 'prediction/mean'
+
+  # The following require a threshold to be applied; the threshold should be
+  # a float in range (0, 1).
+  ACCURACY_AT_THRESHOLD = 'accuracy/positive_threshold_%g'
+  PRECISION_AT_THRESHOLD = 'precision/positive_threshold_%g'
+  RECALL_AT_THRESHOLD = 'recall/positive_threshold_%g'
+
+  # The following require a constraint on a competing metric to be applied;
+  # the constraint value is a float in range (0, 1).
+  PRECISION_AT_RECALL = 'precision_at_recall_%g'
+  RECALL_AT_PRECISION = 'recall_at_precision_%g'
+  SENSITIVITY_AT_SPECIFICITY = 'sensitivity_at_specificity_%g'
+  SPECIFICITY_AT_SENSITIVITY = 'specificity_at_sensitivity_%g'
+
+  # The following require a class id to be applied.
+  PROBABILITY_MEAN_AT_CLASS = 'probability_mean/class%d'
+  AUC_AT_CLASS = 'auc/class%d'
+  AUC_PR_AT_CLASS = 'auc_precision_recall/class%d'
+
+  # The following require a class name to be applied.
+  PROBABILITY_MEAN_AT_NAME = 'probability_mean/%s'
+  AUC_AT_NAME = 'auc/%s'
+  AUC_PR_AT_NAME = 'auc_precision_recall/%s'
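+
+# Editor's note: a sketch of how the parameterized keys above are meant to be
+# formatted (not part of the original module). The `%g` / `%d` placeholders
+# take the threshold or class id:
+#
+#   MetricKeys.ACCURACY_AT_THRESHOLD % 0.5  # 'accuracy/positive_threshold_0.5'
+#   MetricKeys.PRECISION_AT_RECALL % 0.9  # 'precision_at_recall_0.9'
+#   MetricKeys.PROBABILITY_MEAN_AT_CLASS % 2  # 'probability_mean/class2'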
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/optimizers.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/optimizers.py
new file mode 100644
index 00000000..0e8ad525
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/optimizers.py
@@ -0,0 +1,146 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Methods related to optimizers used in canned_estimators."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import inspect
+import six
+import tensorflow as tf
+from tensorflow.python.keras.optimizer_v2 import adagrad as adagrad_v2
+from tensorflow.python.keras.optimizer_v2 import adam as adam_v2
+from tensorflow.python.keras.optimizer_v2 import ftrl as ftrl_v2
+from tensorflow.python.keras.optimizer_v2 import gradient_descent as gradient_descent_v2
+from tensorflow.python.keras.optimizer_v2 import optimizer_v2
+from tensorflow.python.keras.optimizer_v2 import rmsprop as rmsprop_v2
+
+_OPTIMIZER_CLS_NAMES = {
+    'Adagrad': tf.compat.v1.train.AdagradOptimizer,
+    'Adam': tf.compat.v1.train.AdamOptimizer,
+    'Ftrl': tf.compat.v1.train.FtrlOptimizer,
+    'RMSProp': tf.compat.v1.train.RMSPropOptimizer,
+    'SGD': tf.compat.v1.train.GradientDescentOptimizer,
+}
+
+_OPTIMIZER_CLS_NAMES_V2 = {
+    'Adagrad': adagrad_v2.Adagrad,
+    'Adam': adam_v2.Adam,
+    'Ftrl': ftrl_v2.Ftrl,
+    'RMSProp': rmsprop_v2.RMSProp,
+    'SGD': gradient_descent_v2.SGD,
+}
+
+# The default learning rate of 0.05 is a historical artifact of the initial
+# implementation, but seems a reasonable choice.
+_LEARNING_RATE = 0.05
+
+
+def get_optimizer_instance(opt, learning_rate=None):
+  """Returns an optimizer instance.
+
+  Supports the following types for the given `opt`:
+  * An `Optimizer` instance: Returns the given `opt`.
+  * A string: Creates an `Optimizer` subclass with the given `learning_rate`.
+    Supported strings:
+    * 'Adagrad': Returns an `AdagradOptimizer`.
+    * 'Adam': Returns an `AdamOptimizer`.
+    * 'Ftrl': Returns an `FtrlOptimizer`.
+    * 'RMSProp': Returns an `RMSPropOptimizer`.
+    * 'SGD': Returns a `GradientDescentOptimizer`.
+
+  Args:
+    opt: An `Optimizer` instance, or string, as discussed above.
+    learning_rate: A float. Only used if `opt` is a string.
+
+  Returns:
+    An `Optimizer` instance.
+
+  Raises:
+    ValueError: If `opt` is an unsupported string.
+    ValueError: If `opt` is a supported string but `learning_rate` was not
+      specified.
+    ValueError: If `opt` is none of the above types.
+  """
+  if isinstance(opt, six.string_types):
+    if opt in six.iterkeys(_OPTIMIZER_CLS_NAMES):
+      if not learning_rate:
+        raise ValueError(
+            'learning_rate must be specified when opt is a string.')
+      return _OPTIMIZER_CLS_NAMES[opt](learning_rate=learning_rate)
+    raise ValueError(
+        'Unsupported optimizer name: {}. Supported names are: {}'.format(
+            opt, tuple(sorted(six.iterkeys(_OPTIMIZER_CLS_NAMES)))))
+  if callable(opt):
+    opt = opt()
+  if not isinstance(opt, tf.compat.v1.train.Optimizer):
+    raise ValueError(
+        'The given object is not an Optimizer instance. Given: {}'.format(opt))
+  return opt
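+
+
+# Editor's sketch of the three call patterns accepted above (illustrative
+# only, not part of the original module):
+#
+#   get_optimizer_instance('Adagrad', learning_rate=0.1)
+#       # -> tf.compat.v1.train.AdagradOptimizer with learning_rate=0.1
+#   get_optimizer_instance(tf.compat.v1.train.AdamOptimizer(0.001))
+#       # -> the given instance, returned unchanged
+#   get_optimizer_instance('Adagrad')
+#       # -> raises ValueError since no learning_rate was given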
+
+
+def _optimizer_has_default_learning_rate(opt):
+  # `defaults` aligns with the trailing arguments, so zipping the reversed
+  # argument list against it covers exactly the defaulted arguments.
+  signature = inspect.getfullargspec(opt.__init__)
+  if not signature.defaults:
+    return False
+  default_name_to_value = dict(zip(signature.args[::-1], signature.defaults))
+  return 'learning_rate' in default_name_to_value
+
+
+def get_optimizer_instance_v2(opt, learning_rate=None):
+  """Returns an optimizer_v2.OptimizerV2 instance.
+
+  Supports the following types for the given `opt`:
+  * An `optimizer_v2.OptimizerV2` instance: Returns the given `opt`.
+  * A string: Creates an `optimizer_v2.OptimizerV2` subclass with the given
+    `learning_rate`.
+    Supported strings:
+    * 'Adagrad': Returns a `tf.keras.optimizers.Adagrad`.
+    * 'Adam': Returns a `tf.keras.optimizers.Adam`.
+    * 'Ftrl': Returns a `tf.keras.optimizers.Ftrl`.
+    * 'RMSProp': Returns a `tf.keras.optimizers.RMSProp`.
+    * 'SGD': Returns a `tf.keras.optimizers.SGD`.
+
+  Args:
+    opt: A `tf.keras.optimizers.Optimizer` instance, or string, as discussed
+      above.
+    learning_rate: A float. Only used if `opt` is a string. If None and `opt`
+      is a string, the optimizer's default learning rate is used.
+
+  Returns:
+    A `tf.keras.optimizers.Optimizer` instance.
+
+  Raises:
+    ValueError: If `opt` is an unsupported string.
+    ValueError: If `opt` is none of the above types.
+  """
+  if isinstance(opt, six.string_types):
+    if opt in six.iterkeys(_OPTIMIZER_CLS_NAMES_V2):
+      if not learning_rate:
+        if _optimizer_has_default_learning_rate(_OPTIMIZER_CLS_NAMES_V2[opt]):
+          return _OPTIMIZER_CLS_NAMES_V2[opt]()
+        else:
+          return _OPTIMIZER_CLS_NAMES_V2[opt](learning_rate=_LEARNING_RATE)
+      return _OPTIMIZER_CLS_NAMES_V2[opt](learning_rate=learning_rate)
+    raise ValueError(
+        'Unsupported optimizer name: {}. Supported names are: {}'.format(
+            opt, tuple(sorted(six.iterkeys(_OPTIMIZER_CLS_NAMES_V2)))))
+  if callable(opt):
+    opt = opt()
+  if not isinstance(opt, optimizer_v2.OptimizerV2):
+    raise ValueError(
+        'The given object is not a tf.keras.optimizers.Optimizer instance.'
+        ' Given: {}'.format(opt))
+  return opt
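+
+# Editor's sketch of the v2 fallback behavior (illustrative only): unlike the
+# v1 helper, a missing learning_rate does not raise. Assuming the stock Keras
+# optimizers, which all define a default learning rate:
+#
+#   get_optimizer_instance_v2('Adagrad')       # uses the Keras default rate
+#   get_optimizer_instance_v2('Adagrad', 0.1)  # explicit learning rate 0.1
+#   get_optimizer_instance_v2('SGD', learning_rate=0.0)
+#       # note: 0.0 is falsy, so this also falls back to the default rate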
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/parsing_utils.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/parsing_utils.py
new file mode 100644
index 00000000..e236c57a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/parsing_utils.py
@@ -0,0 +1,353 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Parsing related helper function to be used in `input_fn`."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import six
+import tensorflow as tf
+from tensorflow.python.feature_column import feature_column_lib as fc
+from tensorflow.python.util.tf_export import estimator_export
+
+
+@estimator_export('estimator.classifier_parse_example_spec', v1=[])
+def classifier_parse_example_spec_v2(feature_columns,
+                                     label_key,
+                                     label_dtype=tf.dtypes.int64,
+                                     label_default=None,
+                                     weight_column=None):
+  """Generates parsing spec for tf.parse_example to be used with classifiers.
+
+  If users keep data in tf.Example format, they need to call tf.parse_example
+  with a proper feature spec. This utility helps in two main ways:
+
+  * Users need to combine the parsing spec of features with labels and weights
+    (if any) since they are all parsed from the same tf.Example instance. This
+    utility combines these specs.
+  * It is difficult to map the label expected by a classifier such as
+    `DNNClassifier` to the corresponding tf.parse_example spec. This utility
+    encodes it from the information provided by the user (key, dtype).
+
+  Example output of parsing spec:
+
+  ```python
+  # Define features and transformations
+  feature_b = tf.feature_column.numeric_column(...)
+  feature_c_bucketized = tf.feature_column.bucketized_column(
+    tf.feature_column.numeric_column("feature_c"), ...)
+  feature_a_x_feature_c = tf.feature_column.crossed_column(
+      columns=["feature_a", feature_c_bucketized], ...)
+
+  feature_columns = [feature_b, feature_c_bucketized, feature_a_x_feature_c]
+  parsing_spec = tf.estimator.classifier_parse_example_spec(
+      feature_columns, label_key='my-label', label_dtype=tf.string)
+
+  # For the above example, classifier_parse_example_spec would return the dict:
+  assert parsing_spec == {
+    "feature_a": parsing_ops.VarLenFeature(tf.string),
+    "feature_b": parsing_ops.FixedLenFeature([1], dtype=tf.float32),
+    "feature_c": parsing_ops.FixedLenFeature([1], dtype=tf.float32)
+    "my-label" : parsing_ops.FixedLenFeature([1], dtype=tf.string)
+  }
+  ```
+
+  Example usage with a classifier:
+
+  ```python
+  feature_columns = # define features via tf.feature_column
+  estimator = DNNClassifier(
+      n_classes=1000,
+      feature_columns=feature_columns,
+      weight_column='example-weight',
+      label_vocabulary=['photos', 'keep', ...],
+      hidden_units=[256, 64, 16])
+  # This label configuration tells the classifier the following:
+  # * weights are retrieved with key 'example-weight'
+  # * label is string and can be one of the following ['photos', 'keep', ...]
+  # * integer id for label 'photos' is 0, 'keep' is 1, ...
+
+
+  # Input builders
+  def input_fn_train():  # Returns a tuple of features and labels.
+    features = tf.contrib.learn.read_keyed_batch_features(
+        file_pattern=train_files,
+        batch_size=batch_size,
+        # creates parsing configuration for tf.parse_example
+        features=tf.estimator.classifier_parse_example_spec(
+            feature_columns,
+            label_key='my-label',
+            label_dtype=tf.string,
+            weight_column='example-weight'),
+        reader=tf.RecordIOReader)
+    labels = features.pop('my-label')
+    return features, labels
+
+  estimator.train(input_fn=input_fn_train)
+  ```
+
+  Args:
+    feature_columns: An iterable containing all feature columns. All items
+      should be instances of classes derived from `FeatureColumn`.
+    label_key: A string identifying the label, i.e. the key under which
+      tf.Example stores the label.
+    label_dtype: A `tf.dtype` identifying the type of labels. By default it is
+      `tf.int64`. If the user defines a `label_vocabulary`, this should be set
+      to `tf.string`. `tf.float32` labels are only supported for binary
+      classification.
+    label_default: Used as the label if `label_key` does not exist in the
+      given tf.Example. An example usage: let's say `label_key` is 'clicked'
+      and tf.Example contains clicked data only for positive examples in the
+      format `key:clicked, value:1`. This means that if there is no data with
+      key 'clicked', it should count as a negative example by setting
+      `label_default=0`. The type of this value should be compatible with
+      `label_dtype`.
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining the feature column that
+      represents weights. It is used to down-weight or boost examples during
+      training, and is multiplied by the loss of the example. If it is a
+      string, it is used as a key to fetch the weight tensor from `features`.
+      If it is a `NumericColumn`, the raw tensor is fetched by key
+      `weight_column.key`, then `weight_column.normalizer_fn` is applied to it
+      to produce the weight tensor.
+
+  Returns:
+    A dict mapping each feature key to a `FixedLenFeature` or `VarLenFeature`
+    value.
+
+  Raises:
+    ValueError: If label is used in `feature_columns`.
+    ValueError: If weight_column is used in `feature_columns`.
+    ValueError: If any of the given `feature_columns` is not a `_FeatureColumn`
+      instance.
+    ValueError: If `weight_column` is not a `NumericColumn` instance.
+    ValueError: If `label_key` is None.
+  """
+  parsing_spec = fc.make_parse_example_spec_v2(feature_columns)
+  label_spec = tf.io.FixedLenFeature((1,), label_dtype, label_default)
+  return _add_label_and_weight_to_parsing_spec(
+      parsing_spec=parsing_spec,
+      label_key=label_key,
+      label_spec=label_spec,
+      weight_column=weight_column)
+
+
+@estimator_export('estimator.regressor_parse_example_spec', v1=[])
+def regressor_parse_example_spec_v2(feature_columns,
+                                    label_key,
+                                    label_dtype=tf.dtypes.float32,
+                                    label_default=None,
+                                    label_dimension=1,
+                                    weight_column=None):
+  """Generates parsing spec for tf.parse_example to be used with regressors.
+
+  If users keep data in tf.Example format, they need to call tf.parse_example
+  with a proper feature spec. This utility helps in two main ways:
+
+  * Users need to combine the parsing spec of features with labels and weights
+    (if any) since they are all parsed from the same tf.Example instance. This
+    utility combines these specs.
+  * It is difficult to map the label expected by a regressor such as
+    `DNNRegressor` to the corresponding tf.parse_example spec. This utility
+    encodes it from the information provided by the user (key, dtype).
+
+  Example output of parsing spec:
+
+  ```python
+  # Define features and transformations
+  feature_b = tf.feature_column.numeric_column(...)
+  feature_c_bucketized = tf.feature_column.bucketized_column(
+    tf.feature_column.numeric_column("feature_c"), ...)
+  feature_a_x_feature_c = tf.feature_column.crossed_column(
+      columns=["feature_a", feature_c_bucketized], ...)
+
+  feature_columns = [feature_b, feature_c_bucketized, feature_a_x_feature_c]
+  parsing_spec = tf.estimator.regressor_parse_example_spec(
+      feature_columns, label_key='my-label')
+
+  # For the above example, regressor_parse_example_spec would return the dict:
+  assert parsing_spec == {
+    "feature_a": parsing_ops.VarLenFeature(tf.string),
+    "feature_b": parsing_ops.FixedLenFeature([1], dtype=tf.float32),
+    "feature_c": parsing_ops.FixedLenFeature([1], dtype=tf.float32)
+    "my-label" : parsing_ops.FixedLenFeature([1], dtype=tf.float32)
+  }
+  ```
+
+  Example usage with a regressor:
+
+  ```python
+  feature_columns = # define features via tf.feature_column
+  estimator = DNNRegressor(
+      hidden_units=[256, 64, 16],
+      feature_columns=feature_columns,
+      weight_column='example-weight',
+      label_dimension=3)
+  # This label configuration tells the regressor the following:
+  # * weights are retrieved with key 'example-weight'
+  # * label is a 3 dimension tensor with float32 dtype.
+
+
+  # Input builders
+  def input_fn_train():  # Returns a tuple of features and labels.
+    features = tf.contrib.learn.read_keyed_batch_features(
+        file_pattern=train_files,
+        batch_size=batch_size,
+        # creates parsing configuration for tf.parse_example
+        features=tf.estimator.regressor_parse_example_spec(
+            feature_columns,
+            label_key='my-label',
+            label_dimension=3,
+            weight_column='example-weight'),
+        reader=tf.RecordIOReader)
+    labels = features.pop('my-label')
+    return features, labels
+
+  estimator.train(input_fn=input_fn_train)
+  ```
+
+  Args:
+    feature_columns: An iterable containing all feature columns. All items
+      should be instances of classes derived from `_FeatureColumn`.
+    label_key: A string identifying the label, i.e. the key under which
+      tf.Example stores the label.
+    label_dtype: A `tf.dtype` identifying the type of labels. By default it is
+      `tf.float32`.
+    label_default: Used as the label if `label_key` does not exist in the
+      given tf.Example. By default this is `None`, which means
+      `tf.parse_example` will error out if there is any missing label.
+    label_dimension: Number of regression targets per example. This is the size
+      of the last dimension of the labels and logits `Tensor` objects
+      (typically, these have shape `[batch_size, label_dimension]`).
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining the feature column that
+      represents weights. It is used to down-weight or boost examples during
+      training, and is multiplied by the loss of the example. If it is a
+      string, it is used as a key to fetch the weight tensor from `features`.
+      If it is a `NumericColumn`, the raw tensor is fetched by key
+      `weight_column.key`, then `weight_column.normalizer_fn` is applied to it
+      to produce the weight tensor.
+
+  Returns:
+    A dict mapping each feature key to a `FixedLenFeature` or `VarLenFeature`
+    value.
+
+  Raises:
+    ValueError: If label is used in `feature_columns`.
+    ValueError: If weight_column is used in `feature_columns`.
+    ValueError: If any of the given `feature_columns` is not a `_FeatureColumn`
+      instance.
+    ValueError: If `weight_column` is not a `NumericColumn` instance.
+    ValueError: If `label_key` is None.
+  """
+  parsing_spec = fc.make_parse_example_spec_v2(feature_columns)
+  label_spec = tf.io.FixedLenFeature((label_dimension,), label_dtype,
+                                     label_default)
+  return _add_label_and_weight_to_parsing_spec(
+      parsing_spec=parsing_spec,
+      label_key=label_key,
+      label_spec=label_spec,
+      weight_column=weight_column)
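+
+
+# Editor's sketch (not part of the original module): the effect of
+# `label_dimension` is simply the shape of the generated label spec, e.g.
+#
+#   regressor_parse_example_spec_v2(
+#       [tf.feature_column.numeric_column('x')], label_key='y',
+#       label_dimension=3)
+#   # -> {'x': FixedLenFeature((1,), tf.float32),
+#   #     'y': FixedLenFeature((3,), tf.float32)}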
+
+
+def _add_label_and_weight_to_parsing_spec(parsing_spec,
+                                          label_key,
+                                          label_spec,
+                                          weight_column=None):
+  """Adds label and weight spec to given parsing spec.
+
+  Args:
+    parsing_spec: A dict mapping each feature key to a `FixedLenFeature` or
+      `VarLenFeature` to which label and weight spec are added.
+    label_key: A string identifying the label, i.e. the key under which
+      tf.Example stores the label.
+    label_spec: A `FixedLenFeature`.
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining the feature column that
+      represents weights. It is used to down-weight or boost examples during
+      training, and is multiplied by the loss of the example. If it is a
+      string, it is used as a key to fetch the weight tensor from `features`.
+      If it is a `NumericColumn`, the raw tensor is fetched by key
+      `weight_column.key`, then `weight_column.normalizer_fn` is applied to it
+      to produce the weight tensor.
+
+  Returns:
+    A dict mapping each feature key to a `FixedLenFeature` or `VarLenFeature`
+      value.
+  """
+  if label_key in parsing_spec:
+    raise ValueError('label should not be used as feature. '
+                     'label_key: {}, features: {}'.format(
+                         label_key, parsing_spec.keys()))
+  parsing_spec[label_key] = label_spec
+
+  if weight_column is None:
+    return parsing_spec
+
+  if isinstance(weight_column, six.string_types):
+    weight_column = tf.feature_column.numeric_column(weight_column)
+
+  if not isinstance(weight_column, fc.NumericColumn):
+    raise ValueError('weight_column should be an instance of '
+                     'tf.feature_column.numeric_column. '
+                     'Given type: {} value: {}'.format(
+                         type(weight_column), weight_column))
+
+  if weight_column.key in parsing_spec:
+    raise ValueError('weight_column should not be used as feature. '
+                     'weight_column: {}, features: {}'.format(
+                         weight_column.key, parsing_spec.keys()))
+
+  parsing_spec.update(weight_column.parse_example_spec)
+  return parsing_spec
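+
+
+# Editor's sketch of the weight-column path above (illustrative only): a
+# string weight column is converted to a numeric column, and its
+# parse-example spec is merged into the result, e.g.
+#
+#   _add_label_and_weight_to_parsing_spec(
+#       parsing_spec={'x': tf.io.FixedLenFeature((1,), tf.float32)},
+#       label_key='y',
+#       label_spec=tf.io.FixedLenFeature((1,), tf.int64),
+#       weight_column='w')
+#   # -> {'x': FixedLenFeature((1,), tf.float32),
+#   #     'y': FixedLenFeature((1,), tf.int64),
+#   #     'w': FixedLenFeature((1,), tf.float32)}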
+
+
+@estimator_export(v1=['estimator.classifier_parse_example_spec'])
+def classifier_parse_example_spec(feature_columns,
+                                  label_key,
+                                  label_dtype=tf.dtypes.int64,
+                                  label_default=None,
+                                  weight_column=None):
+  parsing_spec = tf.compat.v1.feature_column.make_parse_example_spec(
+      feature_columns)
+  label_spec = tf.io.FixedLenFeature((1,), label_dtype, label_default)
+  return _add_label_and_weight_to_parsing_spec(
+      parsing_spec=parsing_spec,
+      label_key=label_key,
+      label_spec=label_spec,
+      weight_column=weight_column)
+
+
+classifier_parse_example_spec.__doc__ = classifier_parse_example_spec_v2.__doc__
+
+
+@estimator_export(v1=['estimator.regressor_parse_example_spec'])
+def regressor_parse_example_spec(
+    feature_columns,  # pylint: disable=missing-docstring
+    label_key,
+    label_dtype=tf.dtypes.float32,
+    label_default=None,
+    label_dimension=1,
+    weight_column=None):
+  parsing_spec = tf.compat.v1.feature_column.make_parse_example_spec(
+      feature_columns)
+  label_spec = tf.io.FixedLenFeature((label_dimension,), label_dtype,
+                                     label_default)
+  return _add_label_and_weight_to_parsing_spec(
+      parsing_spec=parsing_spec,
+      label_key=label_key,
+      label_spec=label_spec,
+      weight_column=weight_column)
+
+
+regressor_parse_example_spec.__doc__ = regressor_parse_example_spec_v2.__doc__
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/prediction_keys.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/prediction_keys.py
new file mode 100644
index 00000000..3d79419e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/prediction_keys.py
@@ -0,0 +1,37 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Enum for model prediction keys."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+
+class PredictionKeys(object):
+  """Enum for canonical model prediction keys.
+
+  The following values are defined:
+  PREDICTIONS: Used by models that predict values, such as regressor models.
+  """
+
+  CLASSES = 'classes'
+  CLASS_IDS = 'class_ids'
+  ALL_CLASSES = 'all_classes'
+  ALL_CLASS_IDS = 'all_class_ids'
+  LOGISTIC = 'logistic'
+  LOGITS = 'logits'
+  PREDICTIONS = 'predictions'
+  PROBABILITIES = 'probabilities'
+  TOP_K = 'top_k'
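+
+# Editor's note (an illustration, not part of the original module): these keys
+# index the dict returned by an estimator's predict(). A canned classifier's
+# predictions typically carry CLASS_IDS, CLASSES, LOGITS and PROBABILITIES,
+# while a regressor's carry PREDICTIONS:
+#
+#   for pred in classifier.predict(input_fn=predict_input_fn):
+#     class_id = pred[PredictionKeys.CLASS_IDS]
+#     probabilities = pred[PredictionKeys.PROBABILITIES]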
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/rnn.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/rnn.py
new file mode 100644
index 00000000..e8a44d7c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/rnn.py
@@ -0,0 +1,691 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Recurrent Neural Network model and estimators."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import six
+import tensorflow as tf
+
+from tensorflow.python.feature_column import feature_column_lib as fc
+from tensorflow.python.framework import ops
+from tensorflow.python.keras import activations
+from tensorflow.python.keras import layers as keras_layers
+from tensorflow.python.keras import models
+from tensorflow.python.keras.layers import recurrent_v2
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator import model_fn
+from tensorflow_estimator.python.estimator.canned import optimizers
+from tensorflow_estimator.python.estimator.head import binary_class_head as binary_head_lib
+from tensorflow_estimator.python.estimator.head import multi_class_head as multi_head_lib
+from tensorflow_estimator.python.estimator.head import sequential_head as seq_head_lib
+
+# The defaults are historical artifacts of the initial implementation, but seem
+# reasonable choices.
+# TODO(aarg): Also apply default learning rate and clipping to Keras model so
+# they apply when the optimizer is set via `compile` and the model trained via
+# the `fit` method.
+_DEFAULT_LEARNING_RATE = 0.05
+_DEFAULT_CLIP_NORM = 5.0
+
+_SIMPLE_RNN_KEY = 'simple_rnn'
+_LSTM_KEY = 'lstm'
+_GRU_KEY = 'gru'
+
+_CELL_TYPE_TO_LAYER_MAPPING = {
+    _LSTM_KEY: recurrent_v2.LSTM,
+    _GRU_KEY: recurrent_v2.GRU,
+    _SIMPLE_RNN_KEY: keras_layers.SimpleRNN
+}
+
+_CELL_TYPES = {
+    _LSTM_KEY: recurrent_v2.LSTMCell,
+    _GRU_KEY: recurrent_v2.GRUCell,
+    _SIMPLE_RNN_KEY: keras_layers.SimpleRNNCell
+}
+
+# Indicates no value was provided by the user to a kwarg.
+USE_DEFAULT = object()
+
+
+def _single_rnn_cell(units, cell_type):
+  """Initializes a RNN cell."""
+  cell_type = _CELL_TYPES.get(cell_type, cell_type)
+  if not callable(cell_type):
+    raise ValueError(
+        '`cell_type` should be a class producing a RNN cell, or a string '
+        'specifying the cell type. Supported strings are: {}.'.format(
+            [_SIMPLE_RNN_KEY, _LSTM_KEY, _GRU_KEY]))
+  cell = cell_type(units=units)
+  if hasattr(cell, '_enable_caching_device'):
+    # Enable the caching_device to speed up repeated variable reads inside
+    # tf.while_loop. This only takes effect with a graph-mode tf.Session.
+    cell._enable_caching_device = True  # pylint: disable=protected-access
+  if not hasattr(cell, 'call') or not hasattr(cell, 'state_size'):
+    raise ValueError('RNN cell should have a `call` and `state_size` method.')
+  return cell
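+
+
+# Editor's sketch (illustrative only): `cell_type` may be a supported string
+# or any callable producing a cell, e.g.
+#
+#   _single_rnn_cell(32, 'lstm')                   # recurrent_v2.LSTMCell(32)
+#   _single_rnn_cell(32, tf.keras.layers.GRUCell)  # GRUCell(32)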
+
+
+def _make_rnn_cell_fn(units, cell_type=_SIMPLE_RNN_KEY):
+  """Convenience function to create `rnn_cell_fn` for canned RNN Estimators.
+
+  Args:
+    units: Iterable of integer number of hidden units per RNN layer.
+    cell_type: A class producing a RNN cell or a string specifying the cell
+      type. Supported strings are: `'simple_rnn'`, `'lstm'`, and `'gru'`.
+
+  Returns:
+    A function that returns a RNN cell.
+
+  Raises:
+    ValueError: If cell_type is not supported.
+  """
+
+  def rnn_cell_fn():
+    cells = [_single_rnn_cell(n, cell_type) for n in units]
+    if len(cells) == 1:
+      return cells[0]
+    return cells
+
+  return rnn_cell_fn
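+
+
+# Editor's sketch (illustrative only): the returned function yields a single
+# cell or a list of cells, which is what keras_layers.RNN accepts as `cell`:
+#
+#   _make_rnn_cell_fn([20])()            # -> SimpleRNNCell(20)
+#   _make_rnn_cell_fn([32, 16], 'lstm')()
+#       # -> [LSTMCell(32), LSTMCell(16)], stacked by the RNN layer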
+
+
+class RNNModel(models.Model):
+  """A Keras RNN model.
+
+  Composition of layers to compute logits from RNN model, along with training
+  and inference features. See `tf.keras.models.Model` for more details on Keras
+  models.
+
+  Example of usage:
+
+  ```python
+  rating = tf.feature_column.embedding_column(
+      tf.feature_column.sequence_categorical_column_with_identity('rating', 5),
+      10)
+  rnn_layer = tf.keras.layers.SimpleRNN(20)
+  rnn_model = RNNModel(rnn_layer, units=1, sequence_feature_columns=[rating])
+
+  rnn_model.compile(
+      tf.keras.optimizers.Adam(), loss=tf.keras.losses.MeanSquaredError())
+  rnn_model.fit(generator(), epochs=10, steps_per_epoch=100)
+  rnn_model.predict({'rating': np.array([[0, 1], [2, 3]])}, steps=1)
+  ```
+  """
+
+  # TODO(aarg): Update arguments to support multiple rnn layers.
+  def __init__(self,
+               rnn_layer,
+               units,
+               sequence_feature_columns,
+               context_feature_columns=None,
+               activation=None,
+               return_sequences=False,
+               **kwargs):
+    """Initializes a RNNModel instance.
+
+    Args:
+      rnn_layer: A Keras RNN layer.
+      units: An int indicating the dimension of the logit layer, and of the
+        model output.
+      sequence_feature_columns: An iterable containing the `FeatureColumn`s that
+        represent sequential input. All items in the set should either be
+        sequence columns (e.g. `sequence_numeric_column`) or constructed from
+        one (e.g. `embedding_column` with `sequence_categorical_column_*` as
+        input).
+      context_feature_columns: An iterable containing the `FeatureColumn`s for
+        contextual input. The data represented by these columns will be
+        replicated and given to the RNN at each timestep. These columns must be
+        instances of classes derived from `DenseColumn` such as
+        `numeric_column`, not the sequential variants.
+      activation: Activation function to apply to the logit layer (for instance
+        `tf.keras.activations.sigmoid`). If you don't specify anything, no
+        activation is applied.
+      return_sequences: A boolean indicating whether to return the last output
+        in the output sequence, or the full sequence.
+      **kwargs: Additional arguments.
+
+    Raises:
+      ValueError: If `units` is not an int.
+    """
+    super(RNNModel, self).__init__(**kwargs)
+    if not isinstance(units, int):
+      raise ValueError('units must be an int.  Given type: {}'.format(
+          type(units)))
+    self._return_sequences = return_sequences
+    self._sequence_feature_columns = sequence_feature_columns
+    self._context_feature_columns = context_feature_columns
+    self._sequence_features_layer = fc.SequenceFeatures(
+        sequence_feature_columns)
+    self._dense_features_layer = None
+    if context_feature_columns:
+      self._dense_features_layer = tf.compat.v1.keras.layers.DenseFeatures(
+          context_feature_columns)
+    self._rnn_layer = rnn_layer
+    self._logits_layer = keras_layers.Dense(
+        units=units, activation=activation, name='logits')
+
+  def call(self, inputs, training=None):
+    """Computes the RNN output.
+
+    By default no activation is applied and the logits are returned. To output
+    probabilities, an activation such as sigmoid or softmax needs to be
+    specified.
+
+    Args:
+      inputs: A dict mapping keys to input tensors.
+      training: Python boolean indicating whether the layers should behave in
+        training mode or in inference mode. This argument is passed to the
+        model's layers. This is for instance used with cells that use dropout.
+
+    Returns:
+      A `Tensor` with logits from RNN model. It has shape
+      (batch_size, time_steps, logits_size) if `return_sequences` is `True`,
+      (batch_size, logits_size) otherwise.
+    """
+    if not isinstance(inputs, dict):
+      raise ValueError('inputs should be a dictionary of `Tensor`s. '
+                       'Given type: {}'.format(type(inputs)))
+    with ops.name_scope('sequence_input_layer'):
+      try:
+        sequence_input, sequence_length = self._sequence_features_layer(
+            inputs, training=training)
+      except TypeError:
+        sequence_input, sequence_length = self._sequence_features_layer(inputs)
+      tf.compat.v1.summary.histogram('sequence_length', sequence_length)
+
+      if self._context_feature_columns:
+        try:
+          context_input = self._dense_features_layer(inputs, training=training)
+        except TypeError:
+          context_input = self._dense_features_layer(inputs)
+        sequence_input = fc.concatenate_context_input(
+            context_input, sequence_input=sequence_input)
+
+    sequence_length_mask = tf.sequence_mask(sequence_length)
+    rnn_outputs = self._rnn_layer(
+        sequence_input, mask=sequence_length_mask, training=training)
+
+    logits = self._logits_layer(rnn_outputs)
+    if self._return_sequences:
+      # Passes sequence mask as `_keras_mask` to be used in Keras model for
+      # loss and metrics aggregation to exclude padding in the sequential case.
+      logits._keras_mask = sequence_length_mask  # pylint: disable=protected-access
+    return logits
+
+  def get_config(self):
+    """Returns a dictionary with the config of the model."""
+    config = {'name': self.name}
+    config['rnn_layer'] = {
+        'class_name': self._rnn_layer.__class__.__name__,
+        'config': self._rnn_layer.get_config()
+    }
+    config['units'] = self._logits_layer.units
+    config['return_sequences'] = self._return_sequences
+    config['activation'] = activations.serialize(self._logits_layer.activation)
+    config['sequence_feature_columns'] = fc.serialize_feature_columns(
+        self._sequence_feature_columns)
+    config['context_feature_columns'] = (
+        fc.serialize_feature_columns(self._context_feature_columns)
+        if self._context_feature_columns else None)
+    return config
+
+  @classmethod
+  def from_config(cls, config, custom_objects=None):
+    """Creates a RNNModel from its config.
+
+    Args:
+      config: A Python dictionary, typically the output of `get_config`.
+      custom_objects: Optional dictionary mapping names (strings) to custom
+        classes or functions to be considered during deserialization.
+
+    Returns:
+      A RNNModel.
+    """
+    rnn_layer = keras_layers.deserialize(
+        config.pop('rnn_layer'), custom_objects=custom_objects)
+    sequence_feature_columns = fc.deserialize_feature_columns(
+        config.pop('sequence_feature_columns'), custom_objects=custom_objects)
+    context_feature_columns = config.pop('context_feature_columns', None)
+    if context_feature_columns:
+      context_feature_columns = fc.deserialize_feature_columns(
+          context_feature_columns, custom_objects=custom_objects)
+    activation = activations.deserialize(
+        config.pop('activation', None), custom_objects=custom_objects)
+    return cls(
+        rnn_layer=rnn_layer,
+        sequence_feature_columns=sequence_feature_columns,
+        context_feature_columns=context_feature_columns,
+        activation=activation,
+        **config)
+
+
+def _get_rnn_estimator_spec(features, labels, mode, head, rnn_model, optimizer,
+                            return_sequences):
+  """Computes `EstimatorSpec` from logits to use in estimator model function.
+
+  Args:
+    features: dict of `Tensor` and `SparseTensor` objects returned from
+      `input_fn`.
+    labels: `Tensor` of shape [batch_size, 1] or [batch_size] with labels.
+    mode: Defines whether this is training, evaluation or prediction. See
+      `ModeKeys`.
+    head: A `Head` instance.
+    rnn_model: A Keras model that computes RNN logits from features.
+    optimizer: String, `tf.keras.optimizers.Optimizer` object, or callable that
+      creates the optimizer to use for training. If not specified, will use the
+      Adagrad optimizer with a default learning rate of 0.05 and gradient clip
+      norm of 5.0.
+    return_sequences: A boolean indicating whether to return the last output in
+      the output sequence, or the full sequence.
+
+  Returns:
+    An `EstimatorSpec` instance.
+
+  Raises:
+    ValueError: If mode or optimizer is invalid, or features has the wrong type.
+  """
+  training = (mode == model_fn.ModeKeys.TRAIN)
+  # In TRAIN mode, create the optimizer and assign the global_step variable to
+  # optimizer.iterations so that global_step is incremented correctly, as
+  # hooks rely on the global step as a step counter. Otherwise skip optimizer
+  # initialization and set it to None.
+  if training:
+    # If user does not provide an optimizer instance, use the optimizer
+    # specified by the string with default learning rate and gradient clipping.
+    if isinstance(optimizer, six.string_types):
+      optimizer = optimizers.get_optimizer_instance_v2(
+          optimizer, learning_rate=_DEFAULT_LEARNING_RATE)
+      optimizer.clipnorm = _DEFAULT_CLIP_NORM
+    else:
+      optimizer = optimizers.get_optimizer_instance_v2(optimizer)
+    optimizer.iterations = tf.compat.v1.train.get_or_create_global_step()
+  else:
+    optimizer = None
+
+  logits = rnn_model(features, training)
+
+  if return_sequences and head.input_sequence_mask_key not in features:
+    features[head.input_sequence_mask_key] = logits._keras_mask  # pylint: disable=protected-access
+
+  return head.create_estimator_spec(
+      features=features,
+      mode=mode,
+      labels=labels,
+      optimizer=optimizer,
+      logits=logits,
+      update_ops=rnn_model.updates,
+      trainable_variables=rnn_model.trainable_variables)
+
+
+def _verify_rnn_cell_input(rnn_cell_fn, units, cell_type):
+  if rnn_cell_fn and (units or cell_type != USE_DEFAULT):
+    raise ValueError(
+        'units and cell_type must not be specified when using rnn_cell_fn')
+
+
+def _make_rnn_layer(rnn_cell_fn, units, cell_type, return_sequences):
+  """Assert arguments are valid and return rnn_layer_fn.
+
+  Args:
+    rnn_cell_fn: A function that returns a RNN cell instance that will be used
+      to construct the RNN.
+    units: Iterable of integer number of hidden units per RNN layer.
+    cell_type: A class producing a RNN cell or a string specifying the cell
+      type.
+    return_sequences: A boolean indicating whether to return the last output
+      in the output sequence, or the full sequence.
+
+  Returns:
+    A tf.keras.layers.RNN layer.
+  """
+  _verify_rnn_cell_input(rnn_cell_fn, units, cell_type)
+  if cell_type in _CELL_TYPE_TO_LAYER_MAPPING and isinstance(units, int):
+    return _CELL_TYPE_TO_LAYER_MAPPING[cell_type](
+        units=units, return_sequences=return_sequences)
+  if not rnn_cell_fn:
+    if cell_type == USE_DEFAULT:
+      cell_type = _SIMPLE_RNN_KEY
+    rnn_cell_fn = _make_rnn_cell_fn(units, cell_type)
+
+  return keras_layers.RNN(cell=rnn_cell_fn(), return_sequences=return_sequences)
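+
+
+# Editor's sketch of the two branches above (illustrative only):
+#
+#   _make_rnn_layer(None, 32, 'lstm', False)
+#       # int units + known cell type -> the fused recurrent_v2.LSTM layer
+#   _make_rnn_layer(None, [32, 16], 'gru', True)
+#       # iterable units -> generic keras_layers.RNN over stacked GRU cells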
+
+
+@estimator_export('estimator.experimental.RNNEstimator', v1=[])
+class RNNEstimator(estimator.Estimator):
+  """An Estimator for TensorFlow RNN models with user-specified head.
+
+  Example:
+
+  ```python
+  token_sequence = sequence_categorical_column_with_hash_bucket(...)
+  token_emb = embedding_column(categorical_column=token_sequence, ...)
+
+  estimator = RNNEstimator(
+      head=tf.estimator.RegressionHead(),
+      sequence_feature_columns=[token_emb],
+      units=[32, 16], cell_type='lstm')
+
+  # Or with custom RNN cell:
+  def rnn_cell_fn(_):
+    cells = [tf.keras.layers.LSTMCell(size) for size in [32, 16]]
+    return tf.keras.layers.StackedRNNCells(cells)
+
+  estimator = RNNEstimator(
+      head=tf.estimator.RegressionHead(),
+      sequence_feature_columns=[token_emb],
+      rnn_cell_fn=rnn_cell_fn)
+
+  # Input builders
+  def input_fn_train(): # returns x, y
+    pass
+  estimator.train(input_fn=input_fn_train, steps=100)
+
+  def input_fn_eval(): # returns x, y
+    pass
+  metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
+  def input_fn_predict(): # returns x, None
+    pass
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError`:
+
+  * if the head's `weight_column` is not `None`, a feature with
+    `key=weight_column` whose value is a `Tensor`.
+  * for each `column` in `sequence_feature_columns`:
+    - a feature with `key=column.name` whose `value` is a `SparseTensor`.
+  * for each `column` in `context_feature_columns`:
+    - if `column` is a `CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
+  Loss and predicted output are determined by the specified head.
+
+  @compatibility(eager)
+  Estimators are not compatible with eager execution.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               head,
+               sequence_feature_columns,
+               context_feature_columns=None,
+               units=None,
+               cell_type=USE_DEFAULT,
+               rnn_cell_fn=None,
+               return_sequences=False,
+               model_dir=None,
+               optimizer='Adagrad',
+               config=None):
+    """Initializes a `RNNEstimator` instance.
+
+    Args:
+      head: A `Head` instance. This specifies the model's output and loss
+        function to be optimized.
+      sequence_feature_columns: An iterable containing the `FeatureColumn`s that
+        represent sequential input. All items in the set should either be
+        sequence columns (e.g. `sequence_numeric_column`) or constructed from
+        one (e.g. `embedding_column` with `sequence_categorical_column_*` as
+        input).
+      context_feature_columns: An iterable containing the `FeatureColumn`s for
+        contextual input. The data represented by these columns will be
+        replicated and given to the RNN at each timestep. These columns must be
+        instances of classes derived from `DenseColumn` such as
+        `numeric_column`, not the sequential variants.
+      units: Iterable of integer number of hidden units per RNN layer. If set,
+        `cell_type` must also be specified and `rnn_cell_fn` must be `None`.
+      cell_type: A class producing an RNN cell or a string specifying the cell
+        type. Supported strings are: `'simple_rnn'`, `'lstm'`, and `'gru'`. If
+        set, `units` must also be specified and `rnn_cell_fn` must be `None`.
+      rnn_cell_fn: A function that returns an RNN cell instance that will be used
+        to construct the RNN. If set, `units` and `cell_type` cannot be set.
+        This is for advanced users who need additional customization beyond
+        `units` and `cell_type`. Note that `tf.keras.layers.StackedRNNCells` is
+        needed for stacked RNNs.
+      return_sequences: A boolean indicating whether to return the last output
+        in the output sequence, or the full sequence.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      optimizer: An instance of `tf.Optimizer` or a string specifying the
+        optimizer type. Defaults to the Adagrad optimizer.
+      config: `RunConfig` object to configure the runtime settings.
+
+    Note that an RNN cell has:
+      - a `call` method.
+      - a `state_size` attribute.
+      - an `output_size` attribute.
+      - a `get_initial_state` method.
+
+    See the documentation on `tf.keras.layers.RNN` for more details.
+
+    Raises:
+      ValueError: If `units`, `cell_type`, and `rnn_cell_fn` are not
+        compatible.
+    """
+
+    # TODO(aarg): Instead of raising an error convert head to sequential head.
+    if return_sequences and not isinstance(head, seq_head_lib._SequentialHead):  # pylint: disable=protected-access
+      raise ValueError('Provided head must be a `_SequentialHead` object when '
+                       '`return_sequences` is set to True.')
+    _verify_rnn_cell_input(rnn_cell_fn, units, cell_type)
+
+    def _model_fn(features, labels, mode, config):
+      """RNNEstimator model function."""
+      del config  # Unused.
+      rnn_layer = _make_rnn_layer(
+          rnn_cell_fn=rnn_cell_fn,
+          units=units,
+          cell_type=cell_type,
+          return_sequences=return_sequences)
+      rnn_model = RNNModel(
+          rnn_layer=rnn_layer,
+          units=head.logits_dimension,
+          sequence_feature_columns=sequence_feature_columns,
+          context_feature_columns=context_feature_columns,
+          return_sequences=return_sequences,
+          name='rnn_model')
+      return _get_rnn_estimator_spec(
+          features,
+          labels,
+          mode,
+          head=head,
+          rnn_model=rnn_model,
+          optimizer=optimizer,
+          return_sequences=return_sequences)
+
+    super(RNNEstimator, self).__init__(
+        model_fn=_model_fn, model_dir=model_dir, config=config)
+
+
+@estimator_export('estimator.experimental.RNNClassifier', v1=[])
+class RNNClassifier(RNNEstimator):
+  """A classifier for TensorFlow RNN models.
+
+  Trains a recurrent neural network model to classify instances into one of
+  multiple classes.
+
+  Example:
+
+  ```python
+  token_sequence = sequence_categorical_column_with_hash_bucket(...)
+  token_emb = embedding_column(categorical_column=token_sequence, ...)
+
+  estimator = RNNClassifier(
+      sequence_feature_columns=[token_emb],
+      units=[32, 16], cell_type='lstm')
+
+  # Input builders
+  def input_fn_train(): # returns x, y
+    pass
+  estimator.train(input_fn=input_fn_train, steps=100)
+
+  def input_fn_eval(): # returns x, y
+    pass
+  metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
+  def input_fn_predict(): # returns x, None
+    pass
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError`:
+
+  * if `weight_column` is not `None`, a feature with
+    `key=weight_column` whose value is a `Tensor`.
+  * for each `column` in `sequence_feature_columns`:
+    - a feature with `key=column.name` whose `value` is a `SparseTensor`.
+  * for each `column` in `context_feature_columns`:
+    - if `column` is a `CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
+  Loss is calculated by using softmax cross entropy.
+
+  @compatibility(eager)
+  Estimators are not compatible with eager execution.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               sequence_feature_columns,
+               context_feature_columns=None,
+               units=None,
+               cell_type=USE_DEFAULT,
+               rnn_cell_fn=None,
+               return_sequences=False,
+               model_dir=None,
+               n_classes=2,
+               weight_column=None,
+               label_vocabulary=None,
+               optimizer='Adagrad',
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               sequence_mask='sequence_mask',
+               config=None):
+    """Initializes a `RNNClassifier` instance.
+
+    Args:
+      sequence_feature_columns: An iterable containing the `FeatureColumn`s that
+        represent sequential input. All items in the set should either be
+        sequence columns (e.g. `sequence_numeric_column`) or constructed from
+        one (e.g. `embedding_column` with `sequence_categorical_column_*` as
+        input).
+      context_feature_columns: An iterable containing the `FeatureColumn`s for
+        contextual input. The data represented by these columns will be
+        replicated and given to the RNN at each timestep. These columns must be
+        instances of classes derived from `DenseColumn` such as
+        `numeric_column`, not the sequential variants.
+      units: Iterable of integer number of hidden units per RNN layer. If set,
+        `cell_type` must also be specified and `rnn_cell_fn` must be `None`.
+      cell_type: A class producing an RNN cell or a string specifying the cell
+        type. Supported strings are: `'simple_rnn'`, `'lstm'`, and `'gru'`. If
+        set, `units` must also be specified and `rnn_cell_fn` must be `None`.
+      rnn_cell_fn: A function that returns an RNN cell instance that will be used
+        to construct the RNN. If set, `units` and `cell_type` cannot be set.
+        This is for advanced users who need additional customization beyond
+        `units` and `cell_type`. Note that `tf.keras.layers.StackedRNNCells` is
+        needed for stacked RNNs.
+      return_sequences: A boolean indicating whether to return the last output
+        in the output sequence, or the full sequence. Note that if True,
+        `weight_column` must be None or a string.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      n_classes: Number of label classes. Defaults to 2, namely binary
+        classification. Must be > 1.
+      weight_column: A string or a `NumericColumn` created by
+        `tf.feature_column.numeric_column` defining the feature column
+        representing weights. It is used to down-weight or boost examples
+        during training; it will be multiplied by the loss of the example. If
+        it is a string, it is used as a key to fetch the weight tensor from
+        `features`. If it is a `NumericColumn`, the raw tensor is fetched by
+        the key `weight_column.key`, then `weight_column.normalizer_fn` is
+        applied to it to get the weight tensor.
+      label_vocabulary: A list of strings representing possible label values.
+        If given, labels must be of string type and take values in
+        `label_vocabulary`. If it is not given, labels must already be encoded
+        as an integer or float within [0, 1] for `n_classes=2`, or as integer
+        values in {0, 1, ..., n_classes-1} for `n_classes` > 2. An error will
+        be raised if the vocabulary is not provided and the labels are
+        strings.
+      optimizer: An instance of `tf.Optimizer` or a string specifying the
+        optimizer type. Defaults to the Adagrad optimizer.
+      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
+        to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+      sequence_mask: A string with the name of the sequence mask tensor. If
+        `sequence_mask` is in the features dictionary, the provided tensor is
+        used, otherwise the sequence mask is computed from the length of
+        sequential features. The sequence mask is used in evaluation and
+        training mode to aggregate loss and metrics computation while excluding
+        padding steps. It is also added to the predictions dictionary in
+        prediction mode to indicate which steps are padding.
+      config: `RunConfig` object to configure the runtime settings.
+
+    Note that an RNN cell has:
+      - a `call` method.
+      - a `state_size` attribute.
+      - an `output_size` attribute.
+      - a `get_initial_state` method.
+
+    See the documentation on `tf.keras.layers.RNN` for more details.
+
+    Raises:
+      ValueError: If `units`, `cell_type`, and `rnn_cell_fn` are not
+        compatible.
+    """
+    if n_classes == 2:
+      head = binary_head_lib.BinaryClassHead(
+          weight_column=weight_column,
+          label_vocabulary=label_vocabulary,
+          loss_reduction=loss_reduction)
+    else:
+      head = multi_head_lib.MultiClassHead(
+          n_classes=n_classes,
+          weight_column=weight_column,
+          label_vocabulary=label_vocabulary,
+          loss_reduction=loss_reduction)
+
+    if return_sequences:
+      tf.compat.v1.logging.info(
+          'Converting head to sequential head with '
+          '`SequentialHeadWrapper` to allow sequential predictions.')
+      head = seq_head_lib.SequentialHeadWrapper(
+          head,
+          sequence_length_mask=sequence_mask,
+          feature_columns=weight_column)
+
+    super(RNNClassifier, self).__init__(
+        head=head,
+        sequence_feature_columns=sequence_feature_columns,
+        context_feature_columns=context_feature_columns,
+        units=units,
+        cell_type=cell_type,
+        rnn_cell_fn=rnn_cell_fn,
+        return_sequences=return_sequences,
+        model_dir=model_dir,
+        optimizer=optimizer,
+        config=config)
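+
+
+# Usage sketch (hypothetical columns): a sequence-tagging classifier that
+# emits one prediction per timestep. With `return_sequences=True` the head is
+# wrapped in a `SequentialHeadWrapper` above, and the `sequence_mask` feature
+# marks padding steps.
+#
+#   token_emb = tf.feature_column.embedding_column(...)  # sequence column
+#   tagger = RNNClassifier(
+#       sequence_feature_columns=[token_emb],
+#       units=[32], cell_type='lstm',
+#       n_classes=5, return_sequences=True,
+#       sequence_mask='sequence_mask')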
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/saved_model_estimator.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/saved_model_estimator.py
new file mode 100644
index 00000000..c426173c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/saved_model_estimator.py
@@ -0,0 +1,496 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Class that creates an Estimator from a SavedModel."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+
+import six
+import tensorflow as tf
+from tensorflow.python.saved_model import constants
+from tensorflow.python.saved_model import loader_impl
+from tensorflow.python.saved_model import signature_constants
+from tensorflow.python.saved_model import utils_impl as saved_model_utils
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator as estimator_lib
+from tensorflow_estimator.python.estimator import model_fn as model_fn_lib
+from tensorflow_estimator.python.estimator.export import export_lib
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+
+@estimator_export('estimator.experimental.SavedModelEstimator')
+class SavedModelEstimator(estimator_lib.EstimatorV2):
+  """Create an Estimator from a SavedModel.
+
+  Only SavedModels exported with
+  `tf.estimator.Estimator.experimental_export_all_saved_models()` or
+  `tf.estimator.Estimator.export_saved_model()` are supported for this class.
+
+  Example with `tf.estimator.DNNClassifier`:
+
+  **Step 1: Create and train DNNClassifier.**
+
+  ```python
+  feature1 = tf.feature_column.embedding_column(
+      tf.feature_column.categorical_column_with_vocabulary_list(
+          key='feature1', vocabulary_list=('green', 'yellow')), dimension=1)
+  feature2 = tf.feature_column.numeric_column(key='feature2', default_value=0.0)
+
+  classifier = tf.estimator.DNNClassifier(
+      hidden_units=[4,2], feature_columns=[feature1, feature2])
+
+  def input_fn():
+    features = {'feature1': tf.constant(['green', 'green', 'yellow']),
+                'feature2': tf.constant([3.5, 4.2, 6.1])}
+    label = tf.constant([1., 0., 0.])
+    return tf.data.Dataset.from_tensors((features, label)).repeat()
+
+  classifier.train(input_fn=input_fn, steps=10)
+  ```
+
+  **Step 2: Export classifier.**
+  First, build functions that specify the expected inputs.
+
+  ```python
+  # During train and evaluation, both the features and labels should be defined.
+  supervised_input_receiver_fn = (
+      tf.estimator.experimental.build_raw_supervised_input_receiver_fn(
+          {'feature1': tf.placeholder(dtype=tf.string, shape=[None]),
+           'feature2': tf.placeholder(dtype=tf.float32, shape=[None])},
+          tf.placeholder(dtype=tf.float32, shape=[None])))
+
+  # During predict mode, expect to receive a `tf.Example` proto, so a parsing
+  # function is used.
+  serving_input_receiver_fn = (
+      tf.estimator.export.build_parsing_serving_input_receiver_fn(
+          tf.feature_column.make_parse_example_spec([feature1, feature2])))
+  ```
+
+  Next, export the model as a SavedModel. A timestamped directory will be
+  created (for example `/tmp/export_all/1234567890`).
+
+  ```python
+  # Option 1: Save all modes (train, eval, predict)
+  export_dir = classifier.experimental_export_all_saved_models(
+      '/tmp/export_all',
+      {tf.estimator.ModeKeys.TRAIN: supervised_input_receiver_fn,
+       tf.estimator.ModeKeys.EVAL: supervised_input_receiver_fn,
+       tf.estimator.ModeKeys.PREDICT: serving_input_receiver_fn})
+
+  # Option 2: Only export predict mode
+  export_dir = classifier.export_saved_model(
+      '/tmp/export_predict', serving_input_receiver_fn)
+  ```
+
+  **Step 3: Create a SavedModelEstimator from the exported SavedModel.**
+
+  ```python
+  est = tf.estimator.experimental.SavedModelEstimator(export_dir)
+
+  # If all modes were exported, you can immediately evaluate and predict, or
+  # continue training. Otherwise only predict is available.
+  eval_results = est.evaluate(input_fn=input_fn, steps=1)
+  print(eval_results)
+
+  est.train(input_fn=input_fn, steps=20)
+
+  def predict_input_fn():
+    example = tf.train.Example()
+    example.features.feature['feature1'].bytes_list.value.extend(['yellow'])
+    example.features.feature['feature2'].float_list.value.extend([1.])
+    return {'inputs':tf.constant([example.SerializeToString()])}
+
+  predictions = est.predict(predict_input_fn)
+  print(next(predictions))
+  ```
+  """
+
+  def __init__(self, saved_model_dir, model_dir=None):
+    """Initialize a SavedModelEstimator.
+
+    The SavedModelEstimator loads its model function and variable values from
+    the graphs defined in the SavedModel. There is no option to pass in
+    `RunConfig` or `params` arguments, because the model function graph is
+    defined statically in the SavedModel.
+
+    Args:
+      saved_model_dir: Directory containing SavedModel protobuf and subfolders.
+      model_dir: Directory to save new checkpoints during training.
+
+    Raises:
+      NotImplementedError: If a DistributionStrategy is defined in the config.
+        Unless the SavedModelEstimator is subclassed, this shouldn't happen.
+    """
+
+    super(SavedModelEstimator, self).__init__(
+        model_fn=self._model_fn_from_saved_model, model_dir=model_dir)
+    if self._train_distribution or self._eval_distribution:
+      raise NotImplementedError(
+          'SavedModelEstimator currently does not support '
+          'DistributionStrategy.')
+    self.saved_model_dir = saved_model_dir
+    self.saved_model_loader = loader_impl.SavedModelLoader(saved_model_dir)
+    self._available_modes = self._extract_available_modes()
+
+  def _extract_available_modes(self):
+    """Return list of modes found in SavedModel."""
+    available_modes = []
+    tf.compat.v1.logging.info(
+        'Checking available modes for SavedModelEstimator.')
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      try:
+        self._get_meta_graph_def_for_mode(mode)
+      except RuntimeError:
+        tf.compat.v1.logging.warn('%s mode not found in SavedModel.' % mode)
+        continue
+
+      if self._get_signature_def_for_mode(mode) is not None:
+        available_modes.append(mode)
+
+    tf.compat.v1.logging.info('Available modes for Estimator: %s' %
+                              available_modes)
+    return available_modes
+
+  def _validate_mode(self, mode):
+    """Make sure that mode can be run using the SavedModel."""
+    if mode not in self._available_modes:
+      raise RuntimeError('%s mode is not available in the SavedModel. Use '
+                         'saved_model_cli to check that the Metagraph for this '
+                         'mode has been exported.' % mode)
+
+  def _get_meta_graph_def_for_mode(self, mode):
+    tags = export_lib.EXPORT_TAG_MAP[mode]
+    return self.saved_model_loader.get_meta_graph_def_from_tags(tags)
+
+  def _get_signature_def_for_mode(self, mode):
+    meta_graph_def = self._get_meta_graph_def_for_mode(mode)
+    if mode == ModeKeys.PREDICT:
+      sig_def_key = tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY
+    else:
+      sig_def_key = mode
+    if sig_def_key not in meta_graph_def.signature_def:
+      tf.compat.v1.logging.warn(
+          'Metagraph for mode %s was found, but SignatureDef with'
+          ' key \"%s\" is missing.' % (mode, sig_def_key))
+      return None
+    return meta_graph_def.signature_def[sig_def_key]
+
+  def _get_saver_def_from_mode(self, mode):
+    meta_graph_def = self._get_meta_graph_def_for_mode(mode)
+    return meta_graph_def.saver_def
+
+  def _create_and_assert_global_step(self, graph):
+    # Do nothing here. The global step variable will be created/loaded from the
+    # SavedModel. If a global step variable were created here, the result
+    # would be two duplicate global step variables, causing issues during
+    # the warm-start phase.
+    # Due to the global variable being created in the model function, this may
+    # cause issues when running DistributionStrategy. Thus, DistributionStrategy
+    # is not yet supported with SavedModelEstimator.
+    return None
+
+  def _model_fn_from_saved_model(self, features, labels, mode):
+    """Load a SavedModel graph and return an EstimatorSpec."""
+    # TODO(kathywu): Model function loads placeholders from the graph. Calling
+    # export_all_saved_models creates another placeholder for the inputs, on top
+    # of the original placeholders. There should be a way to avoid this.
+    self._validate_mode(mode)
+
+    g = tf.compat.v1.get_default_graph()
+    if tf.compat.v1.train.get_global_step(g) is not None:
+      raise RuntimeError(
+          'Graph must not contain a global step tensor before the SavedModel is'
+          ' loaded. Please make sure that the input function does not create a '
+          'global step.')
+
+    # Extract SignatureDef for information about the input and output tensors.
+    signature_def = self._get_signature_def_for_mode(mode)
+
+    # Generate input map for replacing the inputs in the SavedModel graph with
+    # the provided features and labels.
+    input_map = _generate_input_map(signature_def, features, labels)
+
+    # Create a list of the names of output tensors. When the graph is loaded,
+    # names of the output tensors may be remapped. This ensures that the correct
+    # tensors are returned in the EstimatorSpec.
+    output_tensor_names = [
+        value.name for value in six.itervalues(signature_def.outputs)
+    ]
+
+    # Load the graph. `output_tensors` contains output `Tensors` in the same
+    # order as the `output_tensor_names` list.
+    tags = export_lib.EXPORT_TAG_MAP[mode]
+    _, output_tensors = self.saved_model_loader.load_graph(
+        g, tags, input_map=input_map, return_elements=output_tensor_names)
+
+    # Create a saver object, and restore from the SavedModel `variables`
+    # directory if no checkpoints have been saved in the `model_dir`.
+    saver_obj = tf.compat.v1.train.Saver(
+        saver_def=self._get_saver_def_from_mode(mode))
+    init_fn = None
+    if not super(SavedModelEstimator, self).latest_checkpoint():
+      init_fn = self._restore_from_saver
+
+    # Create a scaffold from the MetaGraphDef that contains ops to initialize
+    # the graph. This should mirror the steps from _add_meta_graph_for_mode(),
+    # which creates a MetaGraphDef from the EstimatorSpec's scaffold.
+    # Get asset tensors, if any.
+    meta_graph_def = self._get_meta_graph_def_for_mode(mode)
+    asset_tensors_dictionary = loader_impl.get_asset_tensors(
+        self.saved_model_loader.export_dir, meta_graph_def, import_scope=None)
+    # TODO(kathywu): switch to loader_impl._get_main_op
+    scaffold = tf.compat.v1.train.Scaffold(
+        local_init_op=loader_impl._get_main_op_tensor(  # pylint: disable=protected-access
+            meta_graph_def),
+        local_init_feed_dict=asset_tensors_dictionary,
+        saver=saver_obj,
+        init_fn=init_fn)
+
+    # Ensure that a global step tensor has been created.
+    global_step_tensor = tf.compat.v1.train.get_global_step(g)
+    tf.compat.v1.train.assert_global_step(global_step_tensor)
+
+    # Extract values to return in the EstimatorSpec.
+    output_map = dict(zip(output_tensor_names, output_tensors))
+    outputs = {
+        key: output_map[value.name]
+        for key, value in six.iteritems(signature_def.outputs)
+    }
+
+    loss, predictions, metrics = _validate_and_extract_outputs(
+        mode, outputs, signature_def.method_name)
+
+    train_op = tf.compat.v1.get_collection(constants.TRAIN_OP_KEY)
+    if len(train_op) > 1:
+      raise RuntimeError('Multiple ops found in the train_op collection.')
+    train_op = None if not train_op else train_op[0]
+
+    _clear_saved_model_collections()
+    return model_fn_lib.EstimatorSpec(
+        scaffold=scaffold,
+        mode=mode,
+        loss=loss,
+        train_op=train_op,
+        predictions=predictions,
+        eval_metric_ops=metrics)
+
+  def _restore_from_saver(self, scaffold, session):
+    return scaffold.saver.restore(session,
+                                  _get_saved_model_ckpt(self.saved_model_dir))
+
+  def latest_checkpoint(self):
+    """Returns the filename of the latest saved checkpoint.
+
+    Returns:
+      Filename of latest checkpoint in `model_dir`. If no checkpoints are found
+      in `model_dir`, then the path to the SavedModel checkpoint is returned.
+    """
+    return (super(SavedModelEstimator, self).latest_checkpoint() or
+            _get_saved_model_ckpt(self.saved_model_dir))
+
+
+def _get_saved_model_ckpt(saved_model_dir):
+  """Return path to variables checkpoint in a `SavedModel` directory."""
+  if not tf.compat.v1.gfile.Exists(
+      os.path.join(
+          saved_model_utils.get_variables_dir(saved_model_dir),
+          tf.compat.as_text('variables.index'))):
+    raise ValueError('Directory provided has an invalid SavedModel format: %s' %
+                     saved_model_dir)
+  return saved_model_utils.get_variables_path(saved_model_dir)
+
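+# For reference, an exported SavedModel directory typically has the layout
+# below; this helper returns the `variables/variables` checkpoint prefix and
+# raises if `variables/variables.index` is absent:
+#
+#   saved_model_dir/
+#     saved_model.pb
+#     variables/
+#       variables.index
+#       variables.data-00000-of-00001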
+
+def _clear_saved_model_collections():
+  """Clear collections that are expected empty when exporting a SavedModel.
+
+  The SavedModel builder uses these collections to track ops necessary to
+  restore the graph state. These collections are expected to be empty before
+  MetaGraphs are added to the builder.
+  """
+  del tf.compat.v1.get_collection_ref(tf.saved_model.ASSETS_KEY)[:]
+  del tf.compat.v1.get_collection_ref(
+      tf.compat.v1.saved_model.LEGACY_INIT_OP_KEY)[:]
+  del tf.compat.v1.get_collection_ref(tf.compat.v1.saved_model.MAIN_OP_KEY)[:]
+  del tf.compat.v1.get_collection_ref(constants.TRAIN_OP_KEY)[:]
+
+
+def _generate_input_map(signature_def, features, labels):
+  """Return dict mapping an input tensor name to a feature or label tensor.
+
+  Args:
+    signature_def: SignatureDef loaded from SavedModel
+    features: A `Tensor`, `SparseTensor`, or dict of string to `Tensor` or
+      `SparseTensor`, specifying the features to be passed to the model.
+    labels: A `Tensor`, `SparseTensor`, or dict of string to `Tensor` or
+      `SparseTensor`, specifying the labels to be passed to the model. May be
+      `None`.
+
+  Returns:
+    dict mapping string names of inputs to features or labels tensors
+
+  Raises:
+    ValueError: if SignatureDef inputs are not completely mapped by the input
+      features and labels.
+  """
+  # Ensure that features and labels are dictionaries. If not, convert each to
+  # a dictionary with a single item. The default keys are different for features
+  # and labels.
+  features = export_lib.wrap_and_check_input_tensors(features, 'feature')
+  if labels is not None:
+    # Unlike features, labels may be None (in prediction mode)
+    labels = export_lib.wrap_and_check_input_tensors(labels, 'label')
+
+  inputs = signature_def.inputs
+  input_map = {}
+  for key, tensor_info in six.iteritems(inputs):
+    input_name = tensor_info.name
+    if ':' in input_name:
+      input_name = input_name[:input_name.find(':')]
+
+    # When tensors are used as control inputs for operations, their names are
+    # prepended with a '^' character in the GraphDef. To handle possible control
+    # flow edge cases, control input names must be included in the input map.
+    control_dependency_name = '^' + input_name
+
+    if key in features:
+      _check_same_dtype_and_shape(features[key], tensor_info, key)
+      input_map[input_name] = input_map[control_dependency_name] = features[key]
+    elif labels is not None and key in labels:
+      _check_same_dtype_and_shape(labels[key], tensor_info, key)
+      input_map[input_name] = input_map[control_dependency_name] = labels[key]
+    else:
+      raise ValueError(
+          'Key \"%s\" not found in features or labels passed in to the model '
+          'function. All required keys: %s' % (key, inputs.keys()))
+
+  return input_map
+
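+# Illustrative example: for a SignatureDef input named 'feature1:0' matched to
+# a feature tensor `t`, the returned map contains both the plain name and its
+# control-dependency form, so control edges such as '^feature1' are rewired
+# too:
+#
+#   {'feature1': t, '^feature1': t}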
+
+def _check_same_dtype_and_shape(tensor, tensor_info, name):
+  """Validate that tensor has the same properties as the TensorInfo proto.
+
+  Args:
+    tensor: a `Tensor` object.
+    tensor_info: a `TensorInfo` proto.
+    name: Name of the input (to identify Tensor if an error is raised).
+
+  Raises:
+    ValueError: If the tensor shape or dtype doesn't match the TensorInfo.
+  """
+  dtype_error = (tensor.dtype != tf.dtypes.DType(tensor_info.dtype))
+  shape_error = not tensor.shape.is_compatible_with(tensor_info.tensor_shape)
+
+  if dtype_error or shape_error:
+    msg = 'Tensor shape and/or dtype validation failed for input %s:' % name
+    if dtype_error:
+      msg += ('\n\tExpected dtype: %s, Got: %s' %
+              (tf.dtypes.DType(tensor_info.dtype), tensor.dtype))
+    if shape_error:
+      msg += ('\n\tExpected shape: %s, Got: %s' %
+              (tf.TensorShape(tensor_info.tensor_shape), tensor.shape))
+
+    raise ValueError(msg)
+
+
+def _extract_eval_metrics(output_dict):
+  """Return a eval metric dict extracted from the output_dict.
+
+  Eval metrics consist of a value tensor and an update op. Both must be in the
+  passed-in tensor dictionary for an eval metric to be added to the returned
+  dictionary.
+
+  Args:
+    output_dict: a dict that maps strings to tensors.
+
+  Returns:
+    dict mapping strings to (value, update_op) tuples.
+  """
+  # pylint: disable=protected-access
+  metric_ops = {}
+  separator_char = export_lib._SupervisedOutput._SEPARATOR_CHAR
+
+  for key, tensor in six.iteritems(output_dict):
+    split_key = key.split(separator_char)
+
+    # The metric name may contain the separator character, so recreate its name.
+    metric_name = separator_char.join(split_key[:-1])
+
+    if split_key[0] == export_lib._SupervisedOutput.METRICS_NAME:
+      # If the key ends with the value suffix, and there is a corresponding
+      # key ending with the update_op suffix, then add tensors to metrics dict.
+      if split_key[-1] == export_lib._SupervisedOutput.METRIC_VALUE_SUFFIX:
+        update_op = ''.join([
+            metric_name, separator_char,
+            export_lib._SupervisedOutput.METRIC_UPDATE_SUFFIX
+        ])
+        if update_op in output_dict:
+          update_op_tensor = output_dict[update_op]
+          metric_ops[metric_name] = (tensor, update_op_tensor)
+
+  # pylint: enable=protected-access
+  return metric_ops
+
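+# Illustrative example (assuming the separator character is '/'): an
+# output_dict such as
+#
+#   {'metrics/accuracy/value': v, 'metrics/accuracy/update_op': u}
+#
+# yields {'metrics/accuracy': (v, u)}; value keys without a matching
+# update_op key are dropped.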
+
+def _validate_and_extract_outputs(mode, output_dict, method_name):
+  """Extract values from SignatureDef output dictionary.
+
+  Args:
+    mode: One of the modes enumerated in `tf.estimator.ModeKeys`.
+    output_dict: dict of string SignatureDef keys to `Tensor`.
+    method_name: Method name of the SignatureDef as a string.
+
+  Returns:
+    Tuple of (
+      loss: `Tensor` object,
+      predictions: dictionary mapping string keys to `Tensor` objects,
+      metrics: dictionary mapping string keys to a tuple of two `Tensor` objects
+    )
+
+  Raises:
+    RuntimeError: raised if SignatureDef has an invalid method name for the mode
+  """
+  # pylint: disable=protected-access
+  loss, predictions, metrics = None, None, None
+
+  if mode == ModeKeys.PREDICT:
+    predictions = output_dict
+  else:
+    # Validate that the SignatureDef's method name matches the expected name for
+    # the given mode.
+    expected_method_name = signature_constants.SUPERVISED_TRAIN_METHOD_NAME
+    if mode == ModeKeys.EVAL:
+      expected_method_name = signature_constants.SUPERVISED_EVAL_METHOD_NAME
+    if method_name != expected_method_name:
+      raise RuntimeError(
+          'Invalid SignatureDef method name for mode %s.\n\tExpected: %s\n\t'
+          'Got: %s\nPlease ensure that the SavedModel was exported with '
+          '`tf.estimator.Estimator.experimental_export_all_saved_models()`.' %
+          (mode, expected_method_name, method_name))
+
+    # Extract loss, metrics and predictions from the output dict.
+    loss = output_dict[export_lib._SupervisedOutput.LOSS_NAME]
+    metrics = _extract_eval_metrics(output_dict)
+    predictions = {
+        key: value
+        for key, value in six.iteritems(output_dict)
+        if key.split(export_lib._SupervisedOutput._SEPARATOR_CHAR)[0] == (
+            export_lib._SupervisedOutput.PREDICTIONS_NAME)
+    }
+
+  # pylint: enable=protected-access
+  return loss, predictions, metrics
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/model_utils.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/model_utils.py
new file mode 100644
index 00000000..8ad20c9c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/model_utils.py
@@ -0,0 +1,76 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Helper functions for training and constructing time series Models."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import numpy
+import tensorflow as tf
+from tensorflow_estimator.python.estimator.canned.timeseries import feature_keys
+
+
+# TODO(agarwal): Remove and replace with functionality from tf.slim
+def fully_connected(inp,
+                    inp_size,
+                    layer_size,
+                    name,
+                    activation=tf.nn.relu,
+                    dtype=tf.dtypes.float32):
+  """Helper method to create a fully connected hidden layer."""
+  wt = tf.compat.v1.get_variable(
+      name="{}_weight".format(name), shape=[inp_size, layer_size], dtype=dtype)
+  bias = tf.compat.v1.get_variable(
+      name="{}_bias".format(name),
+      shape=[layer_size],
+      initializer=tf.compat.v1.initializers.zeros())
+  output = tf.compat.v1.nn.xw_plus_b(inp, wt, bias)
+  if activation is not None:
+    assert callable(activation)
+    output = activation(output)
+  return output
+
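+# Usage sketch (illustrative names): a 16-unit hidden layer over a
+# [batch, 8] input placeholder.
+#
+#   inp = tf.compat.v1.placeholder(tf.float32, shape=[None, 8])
+#   hidden = fully_connected(inp, inp_size=8, layer_size=16, name="hidden1")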
+
+def canonicalize_times_or_steps_from_output(times, steps,
+                                            previous_model_output):
+  """Canonicalizes either relative or absolute times, with error checking."""
+  if steps is not None and times is not None:
+    raise ValueError("Only one of `steps` and `times` may be specified.")
+  if steps is None and times is None:
+    raise ValueError("One of `steps` and `times` must be specified.")
+  if times is not None:
+    times = numpy.array(times)
+    if len(times.shape) != 2:
+      times = times[None, ...]
+    if (previous_model_output[feature_keys.FilteringResults.TIMES].shape[0] !=
+        times.shape[0]):
+      raise ValueError(
+          ("`times` must have a batch dimension matching"
+           " the previous model output (got a batch dimension of {} for `times`"
+           " and {} for the previous model output).").format(
+               times.shape[0], previous_model_output[
+                   feature_keys.FilteringResults.TIMES].shape[0]))
+    if not (previous_model_output[feature_keys.FilteringResults.TIMES][:, -1] <
+            times[:, 0]).all():
+      raise ValueError("Prediction times must be after the corresponding "
+                       "previous model output.")
+  if steps is not None:
+    predict_times = (
+        previous_model_output[feature_keys.FilteringResults.TIMES][:, -1:] + 1 +
+        numpy.arange(steps)[None, ...])
+  else:
+    predict_times = times
+  return predict_times
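+
+
+# Illustrative example: if the previous model output ends at time 9 (batch
+# size 1), then `steps=3` yields predict_times of [[10, 11, 12]], while
+# passing `times=[10, 11, 12]` is reshaped to the same [1, 3] array after
+# validation.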
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/saved_model_utils.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/saved_model_utils.py
new file mode 100644
index 00000000..16573507
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/saved_model_utils.py
@@ -0,0 +1,299 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Convenience functions for working with time series saved_models.
+
+@@predict_continuation
+@@cold_start_filter
+@@filter_continuation
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import numpy
+
+from tensorflow.python.util.all_util import remove_undocumented
+from tensorflow_estimator.python.estimator.canned.timeseries import feature_keys as _feature_keys
+from tensorflow_estimator.python.estimator.canned.timeseries import head as _head
+from tensorflow_estimator.python.estimator.canned.timeseries import model_utils as _model_utils
+
+
+def _canonicalize_numpy_data(data, require_single_batch):
+  """Do basic checking and reshaping for Numpy data.
+
+  Args:
+    data: A dictionary mapping keys to Numpy arrays, with several possible
+      shapes (requires keys `TrainEvalFeatures.TIMES` and
+      `TrainEvalFeatures.VALUES`):
+        Single example; `TIMES` is a scalar and `VALUES` is either a scalar
+          or a vector of length [number of features].
+        Sequence; `TIMES` is a vector of shape [series length], `VALUES`
+          either has shape [series length] (univariate) or
+          [series length x number of features] (multivariate).
+        Batch of sequences; `TIMES` is a vector of shape
+          [batch size x series length], `VALUES` has shape
+          [batch size x series length] or
+          [batch size x series length x number of features].
+      In any case, `VALUES` and any exogenous features must have their shapes
+      prefixed by the shape of the value corresponding to the `TIMES` key.
+    require_single_batch: If True, raises an error if the provided data has a
+      batch dimension > 1.
+
+  Returns:
+    A dictionary with features normalized to have shapes prefixed with [batch
+    size x series length]. The sizes of dimensions which were omitted in the
+    inputs are 1.
+  Raises:
+    ValueError: If dimensions are incorrect or do not match, or required
+      features are missing.
+  """
+  features = {key: numpy.array(value) for key, value in data.items()}
+  if (_feature_keys.TrainEvalFeatures.TIMES not in features or
+      _feature_keys.TrainEvalFeatures.VALUES not in features):
+    raise ValueError("{} and {} are required features.".format(
+        _feature_keys.TrainEvalFeatures.TIMES,
+        _feature_keys.TrainEvalFeatures.VALUES))
+  times = features[_feature_keys.TrainEvalFeatures.TIMES]
+  for key, value in features.items():
+    if value.shape[:len(times.shape)] != times.shape:
+      raise ValueError(
+          ("All features must have their shapes prefixed by the shape of the"
+           " times feature. Got shape {} for feature '{}', but shape {} for"
+           " '{}'").format(value.shape, key, times.shape,
+                           _feature_keys.TrainEvalFeatures.TIMES))
+  if not times.shape:  # a single example
+    if not features[_feature_keys.TrainEvalFeatures.VALUES].shape:  # univariate
+      # Add a feature dimension (with one feature)
+      features[_feature_keys.TrainEvalFeatures.VALUES] = features[
+          _feature_keys.TrainEvalFeatures.VALUES][..., None]
+    elif len(features[_feature_keys.TrainEvalFeatures.VALUES].shape) > 1:
+      raise ValueError(
+          ("Got an unexpected number of dimensions for the '{}' feature."
+           " Was expecting at most 1 dimension"
+           " ([number of features]) since '{}' does not "
+           "have a batch or time dimension, but got shape {}").format(
+               _feature_keys.TrainEvalFeatures.VALUES,
+               _feature_keys.TrainEvalFeatures.TIMES,
+               features[_feature_keys.TrainEvalFeatures.VALUES].shape))
+    # Add trivial batch and time dimensions for every feature
+    features = {key: value[None, None, ...] for key, value in features.items()}
+  if len(times.shape) == 1:  # shape [series length]
+    if len(features[_feature_keys.TrainEvalFeatures.VALUES].shape
+          ) == 1:  # shape [series length]
+      # Add a feature dimension (with one feature)
+      features[_feature_keys.TrainEvalFeatures.VALUES] = features[
+          _feature_keys.TrainEvalFeatures.VALUES][..., None]
+    elif len(features[_feature_keys.TrainEvalFeatures.VALUES].shape) > 2:
+      raise ValueError(
+          ("Got an unexpected number of dimensions for the '{}' feature."
+           " Was expecting at most 2 dimensions"
+           " ([series length, number of features]) since '{}' does not "
+           "have a batch dimension, but got shape {}").format(
+               _feature_keys.TrainEvalFeatures.VALUES,
+               _feature_keys.TrainEvalFeatures.TIMES,
+               features[_feature_keys.TrainEvalFeatures.VALUES].shape))
+    # Add trivial batch dimensions for every feature
+    features = {key: value[None, ...] for key, value in features.items()}
+  elif len(features[_feature_keys.TrainEvalFeatures.TIMES].shape
+          ) != 2:  # shape [batch size, series length]
+    raise ValueError(
+        ("Got an unexpected number of dimensions for times. Was expecting at "
+         "most two ([batch size, series length]), but got shape {}.").format(
+             times.shape))
+  if require_single_batch:
+    # We don't expect input to be already batched; batching is done later
+    if features[_feature_keys.TrainEvalFeatures.TIMES].shape[0] != 1:
+      raise ValueError("Got batch input, was expecting unbatched input.")
+  return features
+
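+# Illustrative example (literal key strings used for brevity): a univariate
+# sequence passed as
+#
+#   {'times': [0, 1, 2], 'values': [1., 2., 3.]}
+#
+# is normalized to shapes times=[1, 3] and values=[1, 3, 1], i.e. a trivial
+# batch dimension plus a trailing feature dimension of size 1.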
+
+def _colate_features_to_feeds_and_fetches(signature,
+                                          features,
+                                          graph,
+                                          continue_from=None):
+  """Uses a saved model signature to construct feed and fetch dictionaries."""
+  if continue_from is None:
+    state_values = {}
+  elif _feature_keys.FilteringResults.STATE_TUPLE in continue_from:
+    # We're continuing from an evaluation, so we need to unpack/flatten state.
+    state_values = _head.state_to_dictionary(
+        continue_from[_feature_keys.FilteringResults.STATE_TUPLE])
+  else:
+    state_values = continue_from
+  input_feed_tensors_by_name = {
+      input_key: graph.as_graph_element(input_value.name)
+      for input_key, input_value in signature.inputs.items()
+  }
+  output_tensors_by_name = {
+      output_key: graph.as_graph_element(output_value.name)
+      for output_key, output_value in signature.outputs.items()
+  }
+  feed_dict = {}
+  for state_key, state_value in state_values.items():
+    feed_dict[input_feed_tensors_by_name[state_key]] = state_value
+  for feature_key, feature_value in features.items():
+    feed_dict[input_feed_tensors_by_name[feature_key]] = feature_value
+  return output_tensors_by_name, feed_dict
+
+
+def predict_continuation(continue_from,
+                         signatures,
+                         session,
+                         steps=None,
+                         times=None,
+                         exogenous_features=None):
+  """Perform prediction using an exported saved model.
+
+  Args:
+    continue_from: A dictionary containing the results of either an Estimator's
+      evaluate method or filter_continuation. Used to determine the model state
+      to make predictions starting from.
+    signatures: The `MetaGraphDef` protocol buffer returned from
+      `tf.saved_model.loader.load`. Used to determine the names of Tensors to
+      feed and fetch. Must be from the same model as `continue_from`.
+    session: The session to use. The session's graph must be the one into which
+      `tf.saved_model.loader.load` loaded the model.
+    steps: The number of steps to predict (scalar), starting after the
+      evaluation or filtering. If `times` is specified, `steps` must not be; one
+      is required.
+    times: A [batch_size x window_size] array of integers (not a Tensor)
+      indicating times to make predictions for. These times must be after the
+      corresponding evaluation or filtering. If `steps` is specified, `times`
+      must not be; one is required. If the batch dimension is omitted, it is
+      assumed to be 1.
+    exogenous_features: Optional dictionary. If specified, indicates exogenous
+      features for the model to use while making the predictions. Values must
+      have shape [batch_size x window_size x ...], where `batch_size` matches
+      the batch dimension used when creating `continue_from`, and `window_size`
+      is either the `steps` argument or the `window_size` of the `times`
+      argument (depending on which was specified).
+
+  Returns:
+    A dictionary with model-specific predictions (typically having keys "mean"
+    and "covariance") and a _feature_keys.PredictionResults.TIMES key indicating
+    the times for which the predictions were computed.
+  Raises:
+    ValueError: If `times` or `steps` are misspecified.
+  """
+  if exogenous_features is None:
+    exogenous_features = {}
+  predict_times = _model_utils.canonicalize_times_or_steps_from_output(
+      times=times, steps=steps, previous_model_output=continue_from)
+  features = {_feature_keys.PredictionFeatures.TIMES: predict_times}
+  features.update(exogenous_features)
+  predict_signature = signatures.signature_def[
+      _feature_keys.SavedModelLabels.PREDICT]
+  output_tensors_by_name, feed_dict = _colate_features_to_feeds_and_fetches(
+      continue_from=continue_from,
+      signature=predict_signature,
+      features=features,
+      graph=session.graph)
+  output = session.run(output_tensors_by_name, feed_dict=feed_dict)
+  output[_feature_keys.PredictionResults.TIMES] = features[
+      _feature_keys.PredictionFeatures.TIMES]
+  return output
+
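+# Usage sketch (illustrative; `export_dir` and `filtering_results` are
+# placeholder names):
+#
+#   with tf.compat.v1.Session() as session:
+#     signatures = tf.compat.v1.saved_model.loader.load(
+#         session, [tf.saved_model.SERVING], export_dir)
+#     predictions = predict_continuation(
+#         continue_from=filtering_results, signatures=signatures,
+#         session=session, steps=10)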
+
+def cold_start_filter(signatures, session, features):
+  """Perform filtering using an exported saved model.
+
+  Filtering refers to updating model state based on new observations.
+  Predictions based on the returned model state will be conditioned on these
+  observations.
+
+  Starts from the model's default/uninformed state.
+
+  Args:
+    signatures: The `MetaGraphDef` protocol buffer returned from
+      `tf.saved_model.loader.load`. Used to determine the names of Tensors to
+      feed and fetch.
+    session: The session to use. The session's graph must be the one into which
+      `tf.saved_model.loader.load` loaded the model.
+    features: A dictionary mapping keys to Numpy arrays, with several possible
+      shapes (requires keys `FilteringFeatures.TIMES` and
+      `FilteringFeatures.VALUES`):
+        Single example; `TIMES` is a scalar and `VALUES` is either a scalar
+          or a vector of length [number of features].
+        Sequence; `TIMES` is a vector of shape [series length], `VALUES`
+          either has shape [series length] (univariate) or
+          [series length x number of features] (multivariate).
+        Batch of sequences; `TIMES` is a vector of shape
+          [batch size x series length], `VALUES` has shape
+          [batch size x series length] or
+          [batch size x series length x number of features].
+      In any case, `VALUES` and any exogenous features must have their shapes
+      prefixed by the shape of the value corresponding to the `TIMES` key.
+
+  Returns:
+    A dictionary containing model state updated to account for the observations
+    in `features`.
+  """
+  filter_signature = signatures.signature_def[
+      _feature_keys.SavedModelLabels.COLD_START_FILTER]
+  features = _canonicalize_numpy_data(data=features, require_single_batch=False)
+  output_tensors_by_name, feed_dict = _colate_features_to_feeds_and_fetches(
+      signature=filter_signature, features=features, graph=session.graph)
+  output = session.run(output_tensors_by_name, feed_dict=feed_dict)
+  # Make it easier to chain filter -> predict by keeping track of the current
+  # time.
+  output[_feature_keys.FilteringResults.TIMES] = features[
+      _feature_keys.FilteringFeatures.TIMES]
+  return output
+
+
+def filter_continuation(continue_from, signatures, session, features):
+  """Perform filtering using an exported saved model.
+
+  Filtering refers to updating model state based on new observations.
+  Predictions based on the returned model state will be conditioned on these
+  observations.
+
+  Args:
+    continue_from: A dictionary containing the results of either an Estimator's
+      evaluate method or a previous filter step (cold start or continuation).
+      Used to determine the model state to start filtering from.
+    signatures: The `MetaGraphDef` protocol buffer returned from
+      `tf.saved_model.loader.load`. Used to determine the names of Tensors to
+      feed and fetch. Must be from the same model as `continue_from`.
+    session: The session to use. The session's graph must be the one into which
+      `tf.saved_model.loader.load` loaded the model.
+    features: A dictionary mapping keys to Numpy arrays, with several possible
+      shapes (requires keys `FilteringFeatures.TIMES` and
+      `FilteringFeatures.VALUES`):
+        Single example; `TIMES` is a scalar and `VALUES` is either a scalar
+          or a vector of length [number of features].
+        Sequence; `TIMES` is a vector of shape [series length], `VALUES`
+          either has shape [series length] (univariate) or
+          [series length x number of features] (multivariate).
+        Batch of sequences; `TIMES` is a vector of shape
+          [batch size x series length], `VALUES` has shape
+          [batch size x series length] or
+          [batch size x series length x number of features].
+      In any case, `VALUES` and any exogenous features must have their shapes
+      prefixed by the shape of the value corresponding to the `TIMES` key.
+
+  Returns:
+    A dictionary containing model state updated to account for the observations
+    in `features`.
+  """
+  filter_signature = signatures.signature_def[
+      _feature_keys.SavedModelLabels.FILTER]
+  features = _canonicalize_numpy_data(data=features, require_single_batch=False)
+  output_tensors_by_name, feed_dict = _colate_features_to_feeds_and_fetches(
+      continue_from=continue_from,
+      signature=filter_signature,
+      features=features,
+      graph=session.graph)
+  output = session.run(output_tensors_by_name, feed_dict=feed_dict)
+  # Make it easier to chain filter -> predict by keeping track of the current
+  # time.
+  output[_feature_keys.FilteringResults.TIMES] = features[
+      _feature_keys.FilteringFeatures.TIMES]
+  return output
+
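+# Chaining sketch (illustrative; `observed` and `more_observations` are
+# placeholder feature dictionaries):
+#
+#   state = cold_start_filter(signatures, session, features=observed)
+#   state = filter_continuation(state, signatures, session,
+#                               features=more_observations)
+#   forecast = predict_continuation(state, signatures, session, steps=5)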
+
+remove_undocumented(module_name=__name__)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/state_management.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/state_management.py
new file mode 100644
index 00000000..6cc08bea
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/timeseries/state_management.py
@@ -0,0 +1,98 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Classes for wrapping a model to operate on different data shapes."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import abc
+from tensorflow_estimator.python.estimator import estimator_lib
+from tensorflow_estimator.python.estimator.canned.timeseries import feature_keys
+
+
+class PassthroughStateManager(object):
+  """A minimal wrapper for models which do not need state management."""
+
+  def __init__(self):
+    self._input_statistics = None
+    self._graph_initialized = False
+
+  def initialize_graph(self, model, input_statistics=None):
+    """Adds required operations to the graph."""
+    del model  # unused
+    self._graph_initialized = True
+    self._input_statistics = input_statistics
+
+  def define_loss(self, model, features, mode):
+    """Wrap "model" with StateManager-specific operations.
+
+    Args:
+      model: The model (inheriting from TimeSeriesModel) to manage state for.
+      features: A dictionary with the following key/value pairs:
+        feature_keys.TrainEvalFeatures.TIMES: A [batch size x window size]
+          Tensor with times for each observation.
+        feature_keys.TrainEvalFeatures.VALUES: A [batch size x window size x num
+          features] Tensor with values for each observation.
+      mode: The tf.estimator.ModeKeys mode to use (TRAIN or EVAL).
+
+    Returns:
+      A ModelOutputs object.
+    Raises:
+      ValueError: If start state was specified.
+    """
+    if feature_keys.State.STATE_TUPLE in features:
+      raise ValueError(
+          "Overriding start state is not supported for this model.")
+    return model.define_loss(features, mode)
+
+
+class _OverridableStateManager(PassthroughStateManager):
+  """Base class for state managers which support overriding model state."""
+
+  @abc.abstractmethod
+  def _define_loss_with_saved_state(self, model, features, mode):
+    pass
+
+  def define_loss(self, model, features, mode):
+    """Switches between explicit start state and managed state."""
+    if feature_keys.FilteringFeatures.STATE_TUPLE in features:
+      # Explicit start state has been provided, so we should use that.
+      if mode == estimator_lib.ModeKeys.TRAIN:
+        raise ValueError(
+            "Overriding saved state for training is not supported (but a value "
+            "for feature {} was specified).".format(
+                feature_keys.FilteringFeatures.STATE_TUPLE))
+      start_state = features[feature_keys.FilteringFeatures.STATE_TUPLE]
+      del features[feature_keys.FilteringFeatures.STATE_TUPLE]
+      return model.get_batch_loss(
+          features=features, mode=mode, state=start_state)
+    else:
+      # No explicit start state; use managed state.
+      return self._define_loss_with_saved_state(
+          model=model, features=features, mode=mode)
+
+
+class FilteringOnlyStateManager(_OverridableStateManager):
+  """State manager for models which use state only for filtering.
+
+  Window-based models (e.g. ARModel) do not require state to be fed during
+  training, instead requiring a specific window size. Rather than also
+  imposing a minimum window size for filtering, these models maintain the
+  window in their state, and so state must be fed when filtering.
+  """
+
+  def _define_loss_with_saved_state(self, model, features, mode):
+    return model.define_loss(features, mode)
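+
+
+# Editorial sketch of the expected calling convention (for exposition only;
+# `model` and `features` are assumed to come from the surrounding estimator):
+#   manager = FilteringOnlyStateManager()
+#   manager.initialize_graph(model)
+#   model_outputs = manager.define_loss(
+#       model, features, estimator_lib.ModeKeys.EVAL)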
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/dnn_testing_utils_v1.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/dnn_testing_utils_v1.py
new file mode 100644
index 00000000..fb307ff0
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/dnn_testing_utils_v1.py
@@ -0,0 +1,2127 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Utils to be used in testing DNN estimators."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+import shutil
+import tempfile
+
+import numpy as np
+import six
+import tensorflow as tf
+from tensorflow.core.framework import summary_pb2
+from tensorflow.python.feature_column import feature_column
+from tensorflow.python.framework import ops
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator import model_fn
+from tensorflow_estimator.python.estimator.canned import head as head_lib
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.canned import prediction_keys
+from tensorflow_estimator.python.estimator.inputs import numpy_io
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+# pylint rules which are disabled by default for test files.
+# pylint: disable=invalid-name,protected-access,missing-docstring
+
+# Names of variables created by model.
+LEARNING_RATE_NAME = 'dnn/regression_head/dnn/learning_rate'
+HIDDEN_WEIGHTS_NAME_PATTERN = 'dnn/hiddenlayer_%d/kernel'
+HIDDEN_BIASES_NAME_PATTERN = 'dnn/hiddenlayer_%d/bias'
+BATCH_NORM_BETA_NAME_PATTERN = 'dnn/hiddenlayer_%d/batchnorm_%d/beta'
+BATCH_NORM_GAMMA_NAME_PATTERN = 'dnn/hiddenlayer_%d/batchnorm_%d/gamma'
+BATCH_NORM_MEAN_NAME_PATTERN = 'dnn/hiddenlayer_%d/batchnorm_%d/moving_mean'
+BATCH_NORM_VARIANCE_NAME_PATTERN = (
+    'dnn/hiddenlayer_%d/batchnorm_%d/moving_variance')
+LOGITS_WEIGHTS_NAME = 'dnn/logits/kernel'
+LOGITS_BIASES_NAME = 'dnn/logits/bias'
+OCCUPATION_EMBEDDING_NAME = ('dnn/input_from_feature_columns/input_layer/'
+                             'occupation_embedding/embedding_weights')
+CITY_EMBEDDING_NAME = ('dnn/input_from_feature_columns/input_layer/'
+                       'city_embedding/embedding_weights')
+
+# This is so that we can easily switch between feature_column and
+# feature_column_v2 for testing.
+feature_column.numeric_column = feature_column._numeric_column
+feature_column.categorical_column_with_hash_bucket = feature_column._categorical_column_with_hash_bucket  # pylint: disable=line-too-long
+feature_column.categorical_column_with_vocabulary_list = feature_column._categorical_column_with_vocabulary_list  # pylint: disable=line-too-long
+feature_column.categorical_column_with_vocabulary_file = feature_column._categorical_column_with_vocabulary_file  # pylint: disable=line-too-long
+feature_column.embedding_column = feature_column._embedding_column
+
+
+def assert_close(expected, actual, rtol=1e-04, message='', name='assert_close'):
+  with ops.name_scope(name, 'assert_close', (expected, actual, rtol)) as scope:
+    expected = ops.convert_to_tensor(expected, name='expected')
+    actual = ops.convert_to_tensor(actual, name='actual')
+    rdiff = tf.math.abs((expected - actual) / expected, 'diff')
+    rtol = ops.convert_to_tensor(rtol, name='rtol')
+    return tf.compat.v1.debugging.assert_less(
+        rdiff,
+        rtol,
+        data=(message, 'Condition expected =~ actual did not hold '
+              'element-wise: expected = ', expected, 'actual = ', actual,
+              'rdiff = ', rdiff, 'rtol = ', rtol,),
+        summarize=expected.get_shape().num_elements(),
+        name=scope)
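+
+
+# Editorial note: when run in a session, the op returned by assert_close
+# raises tf.errors.InvalidArgumentError if any element-wise relative
+# difference reaches rtol, e.g. (illustrative):
+#   sess.run(assert_close(tf.constant([1.0]), tf.constant([1.1])))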
+
+
+def create_checkpoint(weights_and_biases,
+                      global_step,
+                      model_dir,
+                      batch_norm_vars=None):
+  """Create checkpoint file with provided model weights.
+
+  Args:
+    weights_and_biases: Iterable of tuples of weight and bias values.
+    global_step: Initial global step to save in checkpoint.
+    model_dir: Directory into which checkpoint is saved.
+    batch_norm_vars: Variables used for batch normalization.
+  """
+  weights, biases = zip(*weights_and_biases)
+  if batch_norm_vars:
+    assert len(batch_norm_vars) == len(weights_and_biases) - 1
+    (bn_betas, bn_gammas, bn_means, bn_variances) = zip(*batch_norm_vars)
+  model_weights = {}
+
+  # Hidden layer weights.
+  for i in range(0, len(weights) - 1):
+    model_weights[HIDDEN_WEIGHTS_NAME_PATTERN % i] = weights[i]
+    model_weights[HIDDEN_BIASES_NAME_PATTERN % i] = biases[i]
+    if batch_norm_vars:
+      model_weights[BATCH_NORM_BETA_NAME_PATTERN % (i, i)] = bn_betas[i]
+      model_weights[BATCH_NORM_GAMMA_NAME_PATTERN % (i, i)] = bn_gammas[i]
+      model_weights[BATCH_NORM_MEAN_NAME_PATTERN % (i, i)] = bn_means[i]
+      model_weights[BATCH_NORM_VARIANCE_NAME_PATTERN % (i, i)] = bn_variances[i]
+
+  # Output layer weights.
+  model_weights[LOGITS_WEIGHTS_NAME] = weights[-1]
+  model_weights[LOGITS_BIASES_NAME] = biases[-1]
+
+  with tf.Graph().as_default():
+    # Create model variables.
+    for k, v in six.iteritems(model_weights):
+      tf.Variable(v, name=k, dtype=tf.dtypes.float32)
+
+    # Create non-model variables.
+    global_step_var = tf.compat.v1.train.create_global_step()
+
+    # Initialize vars and save checkpoint.
+    with tf.compat.v1.Session() as sess:
+      tf.compat.v1.initializers.global_variables().run()
+      global_step_var.assign(global_step).eval()
+      tf.compat.v1.train.Saver().save(sess,
+                                      os.path.join(model_dir, 'model.ckpt'))
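+
+
+# Illustrative call (mirrors the tests below; the model_dir is a placeholder):
+#   create_checkpoint(
+#       weights_and_biases=(
+#           ([[.6, .5]], [.1, -.1]),              # hidden layer 0: kernel, bias
+#           ([[1., .8], [-.8, -1.]], [.2, -.2]),  # hidden layer 1: kernel, bias
+#           ([[-1.], [1.]], [.3]),                # logits layer: kernel, bias
+#       ),
+#       global_step=100,
+#       model_dir='/tmp/model')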
+
+
+def mock_head(testcase, hidden_units, logits_dimension, expected_logits):
+  """Returns a mock head that validates logits values and variable names."""
+  hidden_weights_names = [(HIDDEN_WEIGHTS_NAME_PATTERN + '/part_0:0') % i
+                          for i in range(len(hidden_units))]
+  hidden_biases_names = [(HIDDEN_BIASES_NAME_PATTERN + '/part_0:0') % i
+                         for i in range(len(hidden_units))]
+  expected_var_names = (
+      hidden_weights_names + hidden_biases_names +
+      [LOGITS_WEIGHTS_NAME + '/part_0:0', LOGITS_BIASES_NAME + '/part_0:0'])
+
+  def _create_tpu_estimator_spec(features,
+                                 mode,
+                                 logits,
+                                 labels,
+                                 train_op_fn=None,
+                                 optimizer=None):
+    del features, labels  # Not used.
+    trainable_vars = tf.compat.v1.get_collection(
+        tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)
+    testcase.assertItemsEqual(expected_var_names,
+                              [var.name for var in trainable_vars])
+    loss = tf.constant(1.)
+    assert_logits = assert_close(
+        expected_logits, logits, message='Failed for mode={}. '.format(mode))
+    with tf.control_dependencies([assert_logits]):
+      if mode == ModeKeys.TRAIN:
+        if train_op_fn is not None:
+          train_op = train_op_fn(loss)
+        elif optimizer is not None:
+          train_op = optimizer.minimize(loss, global_step=None)
+        return model_fn._TPUEstimatorSpec(
+            mode=mode, loss=loss, train_op=train_op)
+      elif mode == ModeKeys.EVAL:
+        return model_fn._TPUEstimatorSpec(mode=mode, loss=tf.identity(loss))
+      elif mode == ModeKeys.PREDICT:
+        return model_fn._TPUEstimatorSpec(
+            mode=mode, predictions={'logits': tf.identity(logits)})
+      else:
+        testcase.fail('Invalid mode: {}'.format(mode))
+
+  def _create_estimator_spec(features,
+                             mode,
+                             logits,
+                             labels,
+                             train_op_fn=None,
+                             optimizer=None):
+    tpu_spec = _create_tpu_estimator_spec(features, mode, logits, labels,
+                                          train_op_fn, optimizer)
+    return tpu_spec.as_estimator_spec()
+
+  head = tf.compat.v1.test.mock.NonCallableMagicMock(spec=head_lib._Head)
+  head.logits_dimension = logits_dimension
+  head._create_tpu_estimator_spec = tf.compat.v1.test.mock.MagicMock(
+      wraps=_create_tpu_estimator_spec)
+  head.create_estimator_spec = tf.compat.v1.test.mock.MagicMock(
+      wraps=_create_estimator_spec)
+
+  return head
+
+
+def mock_optimizer(testcase, hidden_units, expected_loss=None):
+  """Creates a mock optimizer to test the train method.
+
+  Args:
+    testcase: A TestCase instance.
+    hidden_units: Iterable of integer sizes for the hidden layers.
+    expected_loss: If given, will assert the loss value.
+
+  Returns:
+    A mock Optimizer.
+  """
+  hidden_weights_names = [(HIDDEN_WEIGHTS_NAME_PATTERN + '/part_0:0') % i
+                          for i in range(len(hidden_units))]
+  hidden_biases_names = [(HIDDEN_BIASES_NAME_PATTERN + '/part_0:0') % i
+                         for i in range(len(hidden_units))]
+  expected_var_names = (
+      hidden_weights_names + hidden_biases_names +
+      [LOGITS_WEIGHTS_NAME + '/part_0:0', LOGITS_BIASES_NAME + '/part_0:0'])
+
+  def _minimize(loss, global_step=None, var_list=None):
+    """Mock of optimizer.minimize."""
+    trainable_vars = var_list or tf.compat.v1.get_collection(
+        tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)
+    testcase.assertItemsEqual(expected_var_names,
+                              [var.name for var in trainable_vars])
+
+    # Verify loss. We can't check the value directly, so we add an assert op.
+    testcase.assertEqual(0, loss.shape.ndims)
+    if expected_loss is None:
+      if global_step is not None:
+        return tf.compat.v1.assign_add(global_step, 1).op
+      return tf.no_op()
+    assert_loss = assert_close(
+        tf.cast(expected_loss, name='expected', dtype=tf.dtypes.float32),
+        loss,
+        name='assert_loss')
+    with tf.control_dependencies((assert_loss,)):
+      if global_step is not None:
+        return tf.compat.v1.assign_add(global_step, 1).op
+      return tf.no_op()
+
+  optimizer_mock = tf.compat.v1.test.mock.NonCallableMagicMock(
+      spec=tf.compat.v1.train.Optimizer,
+      wraps=tf.compat.v1.train.Optimizer(
+          use_locking=False, name='my_optimizer'))
+  optimizer_mock.minimize = tf.compat.v1.test.mock.MagicMock(wraps=_minimize)
+
+  return optimizer_mock
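+
+
+# Editorial note: the mock's minimize() verifies the trainable-variable names
+# (and, when expected_loss is given, the loss value via assert_close), then
+# increments the global step so Estimator.train(max_steps=...) terminates.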
+
+
+class BaseDNNModelFnTest(object):
+  """Tests that _dnn_model_fn passes expected logits to mock head."""
+
+  def __init__(self, dnn_model_fn, fc_impl=feature_column):
+    self._dnn_model_fn = dnn_model_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def _test_logits(self, mode, hidden_units, logits_dimension, inputs,
+                   expected_logits):
+    """Tests that the expected logits are passed to mock head."""
+    with tf.Graph().as_default():
+      tf.compat.v1.train.create_global_step()
+      head = mock_head(
+          self,
+          hidden_units=hidden_units,
+          logits_dimension=logits_dimension,
+          expected_logits=expected_logits)
+      estimator_spec = self._dnn_model_fn(
+          features={'age': tf.constant(inputs)},
+          labels=tf.constant([[1]]),
+          mode=mode,
+          head=head,
+          hidden_units=hidden_units,
+          feature_columns=[
+              self._fc_impl.numeric_column(
+                  'age', shape=np.array(inputs).shape[1:])
+          ],
+          optimizer=mock_optimizer(self, hidden_units))
+      with tf.compat.v1.train.MonitoredTrainingSession(
+          checkpoint_dir=self._model_dir) as sess:
+        if mode == ModeKeys.TRAIN:
+          sess.run(estimator_spec.train_op)
+        elif mode == ModeKeys.EVAL:
+          sess.run(estimator_spec.loss)
+        elif mode == ModeKeys.PREDICT:
+          sess.run(estimator_spec.predictions)
+        else:
+          self.fail('Invalid mode: {}'.format(mode))
+
+  def test_one_dim_logits(self):
+    """Tests one-dimensional logits.
+
+    input_layer = [[10]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)]] = [[6.1, 4.9]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)]]
+                   = [[relu(2.38), relu(-0.22)]] = [[2.38, 0]]
+    logits = [[-1*2.38 +1*0 +0.3]] = [[-2.08]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=1,
+          inputs=[[10.]],
+          expected_logits=[[-2.08]])
+
+  def test_multi_dim_logits(self):
+    """Tests multi-dimensional logits.
+
+    input_layer = [[10]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)]] = [[6.1, 4.9]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)]]
+                   = [[relu(2.38), relu(-0.22)]] = [[2.38, 0]]
+    logits = [[-1*2.38 +0.3, 1*2.38 -0.3, 0.5*2.38]]
+           = [[-2.08, 2.08, 1.19]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10.]],
+          expected_logits=[[-2.08, 2.08, 1.19]])
+
+  def test_multi_example_multi_dim_logits(self):
+    """Tests multiple examples and multi-dimensional logits.
+
+    input_layer = [[10], [5]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)],
+                      [relu(0.6*5 +0.1), relu(0.5*5 -0.1)]]
+                   = [[6.1, 4.9], [3.1, 2.4]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)],
+                      [relu(1*3.1 -0.8*2.4 +0.2), relu(0.8*3.1 -1*2.4 -0.2)]]
+                   = [[2.38, 0], [1.38, 0]]
+    logits = [[-1*2.38 +0.3, 1*2.38 -0.3, 0.5*2.38],
+              [-1*1.38 +0.3, 1*1.38 -0.3, 0.5*1.38]]
+           = [[-2.08, 2.08, 1.19], [-1.08, 1.08, 0.69]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10.], [5.]],
+          expected_logits=[[-2.08, 2.08, 1.19], [-1.08, 1.08, .69]])
+
+  def test_multi_dim_input_one_dim_logits(self):
+    """Tests multi-dimensional inputs and one-dimensional logits.
+
+    input_layer = [[10, 8]]
+    hidden_layer_0 = [[relu(0.6*10 -0.6*8 +0.1), relu(0.5*10 -0.5*8 -0.1)]]
+                   = [[1.3, 0.9]]
+    hidden_layer_1 = [[relu(1*1.3 -0.8*0.9 + 0.2), relu(0.8*1.3 -1*0.9 -0.2)]]
+                   = [[0.78, relu(-0.06)]] = [[0.78, 0]]
+    logits = [[-1*0.78 +1*0 +0.3]] = [[-0.48]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=1,
+          inputs=[[10., 8.]],
+          expected_logits=[[-0.48]])
+
+  def test_multi_dim_input_multi_dim_logits(self):
+    """Tests multi-dimensional inputs and multi-dimensional logits.
+
+    input_layer = [[10, 8]]
+    hidden_layer_0 = [[relu(0.6*10 -0.6*8 +0.1), relu(0.5*10 -0.5*8 -0.1)]]
+                   = [[1.3, 0.9]]
+    hidden_layer_1 = [[relu(1*1.3 -0.8*0.9 + 0.2), relu(0.8*1.3 -1*0.9 -0.2)]]
+                   = [[0.78, relu(-0.06)]] = [[0.78, 0]]
+    logits = [[-1*0.78 + 0.3, 1*0.78 -0.3, 0.5*0.78]] = [[-0.48, 0.48, 0.39]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10., 8.]],
+          expected_logits=[[-0.48, 0.48, 0.39]])
+
+  def test_multi_feature_column_multi_dim_logits(self):
+    """Tests multiple feature columns and multi-dimensional logits.
+
+    All numbers are the same as test_multi_dim_input_multi_dim_logits. The only
+    difference is that the input consists of two 1D feature columns, instead of
+    one 2D feature column.
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    hidden_units = (2, 2)
+    logits_dimension = 3
+    inputs = ([[10.]], [[8.]])
+    expected_logits = [[-0.48, 0.48, 0.39]]
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      with tf.Graph().as_default():
+        tf.compat.v1.train.create_global_step()
+        head = mock_head(
+            self,
+            hidden_units=hidden_units,
+            logits_dimension=logits_dimension,
+            expected_logits=expected_logits)
+        estimator_spec = self._dnn_model_fn(
+            features={
+                'age': tf.constant(inputs[0]),
+                'height': tf.constant(inputs[1])
+            },
+            labels=tf.constant([[1]]),
+            mode=mode,
+            head=head,
+            hidden_units=hidden_units,
+            feature_columns=[
+                self._fc_impl.numeric_column('age'),
+                self._fc_impl.numeric_column('height')
+            ],
+            optimizer=mock_optimizer(self, hidden_units))
+        with tf.compat.v1.train.MonitoredTrainingSession(
+            checkpoint_dir=self._model_dir) as sess:
+          if mode == ModeKeys.TRAIN:
+            sess.run(estimator_spec.train_op)
+          elif mode == ModeKeys.EVAL:
+            sess.run(estimator_spec.loss)
+          elif mode == ModeKeys.PREDICT:
+            sess.run(estimator_spec.predictions)
+          else:
+            self.fail('Invalid mode: {}'.format(mode))
+
+  def test_multi_feature_column_mix_multi_dim_logits(self):
+    """Tests multiple feature columns and multi-dimensional logits.
+
+    All numbers are the same as test_multi_dim_input_multi_dim_logits. The
+    difference is that the input consists of two 1D feature columns built
+    with mixed feature-column implementations, instead of one 2D feature
+    column.
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    hidden_units = (2, 2)
+    logits_dimension = 3
+    inputs = ([[10.]], [[8.]])
+    expected_logits = [[-0.48, 0.48, 0.39]]
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      with tf.Graph().as_default():
+        tf.compat.v1.train.create_global_step()
+        head = mock_head(
+            self,
+            hidden_units=hidden_units,
+            logits_dimension=logits_dimension,
+            expected_logits=expected_logits)
+        estimator_spec = self._dnn_model_fn(
+            features={
+                'age': tf.constant(inputs[0]),
+                'height': tf.constant(inputs[1])
+            },
+            labels=tf.constant([[1]]),
+            mode=mode,
+            head=head,
+            hidden_units=hidden_units,
+            feature_columns=[
+                feature_column.numeric_column('age'),
+                tf.feature_column.numeric_column('height')
+            ],
+            optimizer=mock_optimizer(self, hidden_units))
+        with tf.compat.v1.train.MonitoredTrainingSession(
+            checkpoint_dir=self._model_dir) as sess:
+          if mode == ModeKeys.TRAIN:
+            sess.run(estimator_spec.train_op)
+          elif mode == ModeKeys.EVAL:
+            sess.run(estimator_spec.loss)
+          elif mode == ModeKeys.PREDICT:
+            sess.run(estimator_spec.predictions)
+          else:
+            self.fail('Invalid mode: {}'.format(mode))
+
+  def test_features_tensor_raises_value_error(self):
+    """Tests that passing a Tensor for features raises a ValueError."""
+    hidden_units = (2, 2)
+    logits_dimension = 3
+    inputs = ([[10.]], [[8.]])
+    expected_logits = [[0, 0, 0]]
+
+    with tf.Graph().as_default():
+      tf.compat.v1.train.create_global_step()
+      head = mock_head(
+          self,
+          hidden_units=hidden_units,
+          logits_dimension=logits_dimension,
+          expected_logits=expected_logits)
+      with self.assertRaisesRegexp(ValueError, 'features should be a dict'):
+        self._dnn_model_fn(
+            features=tf.constant(inputs),
+            labels=tf.constant([[1]]),
+            mode=ModeKeys.TRAIN,
+            head=head,
+            hidden_units=hidden_units,
+            feature_columns=[
+                self._fc_impl.numeric_column(
+                    'age', shape=np.array(inputs).shape[1:])
+            ],
+            optimizer=mock_optimizer(self, hidden_units))
+
+
+class BaseDNNLogitFnTest(object):
+  """Tests correctness of logits calculated from _dnn_logit_fn_builder."""
+
+  def __init__(self, dnn_logit_fn_builder, fc_impl=feature_column):
+    self._dnn_logit_fn_builder = dnn_logit_fn_builder
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def _test_logits(self,
+                   mode,
+                   hidden_units,
+                   logits_dimension,
+                   inputs,
+                   expected_logits,
+                   batch_norm=False):
+    """Tests that the expected logits are calculated."""
+    with tf.Graph().as_default():
+      # Global step needed for MonitoredSession, which is in turn used to
+      # explicitly set variable weights through a checkpoint.
+      tf.compat.v1.train.create_global_step()
+      # Use a variable scope here with 'dnn', emulating the dnn model_fn, so
+      # the checkpoint naming is shared.
+      with tf.compat.v1.variable_scope('dnn'):
+        input_layer_partitioner = (
+            tf.compat.v1.min_max_variable_partitioner(
+                max_partitions=0, min_slice_size=64 << 20))
+        logit_fn = self._dnn_logit_fn_builder(
+            units=logits_dimension,
+            hidden_units=hidden_units,
+            feature_columns=[
+                self._fc_impl.numeric_column(
+                    'age', shape=np.array(inputs).shape[1:])
+            ],
+            activation_fn=tf.nn.relu,
+            dropout=None,
+            input_layer_partitioner=input_layer_partitioner,
+            batch_norm=batch_norm)
+        logits = logit_fn(features={'age': tf.constant(inputs)}, mode=mode)
+        with tf.compat.v1.train.MonitoredTrainingSession(
+            checkpoint_dir=self._model_dir) as sess:
+          self.assertAllClose(expected_logits, sess.run(logits))
+
+  def test_one_dim_logits(self):
+    """Tests one-dimensional logits.
+
+    input_layer = [[10]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)]] = [[6.1, 4.9]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)]]
+                   = [[relu(2.38), relu(-0.22)]] = [[2.38, 0]]
+    logits = [[-1*2.38 +1*0 +0.3]] = [[-2.08]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=1,
+          inputs=[[10.]],
+          expected_logits=[[-2.08]])
+
+  def test_one_dim_logits_with_batch_norm(self):
+    """Tests one-dimensional logits.
+
+    input_layer = [[10], [20]]
+    hidden_layer_0 = [[relu(0.6*10 +1), relu(0.5*10 -1)],
+                      [relu(0.6*20 +1), relu(0.5*20 -1)]] = [[7, 4], [13, 9]]
+
+    batch_norm_0, training (epsilon = 0.001):
+      mean1 = 1/2*(7+13) = 10,
+      variance1 = 1/2*(3^2+3^2) = 9
+      x11 = (7-10)/sqrt(9+0.001) = -0.999944449,
+      x21 = (13-10)/sqrt(9+0.001) = 0.999944449,
+
+      mean2 = 1/2*(4+9) = 6.5,
+      variance2 = 1/2*(2.5^2+2.5^2) = 6.25
+      x12 = (4-6.5)/sqrt(6.25+0.001) = -0.99992001,
+      x22 = (9-6.5)/sqrt(6.25+0.001) = 0.99992001,
+
+    logits = [[-1*(-0.999944449) + 2*(-0.99992001) + 0.3],
+              [-1*0.999944449 + 2*0.99992001 + 0.3]]
+           = [[-0.699895571],[1.299895571]]
+
+    batch_norm_0, not training (epsilon = 0.001):
+      moving_mean1 = 0, moving_variance1 = 1
+      x11 = (7-0)/sqrt(1+0.001) = 6.996502623,
+      x21 = (13-0)/sqrt(1+0.001) = 12.993504871,
+      moving_mean2 = 0, moving_variance2 = 1
+      x12 = (4-0)/sqrt(1+0.001) = 3.998001499,
+      x22 = (9-0)/sqrt(1+0.001) = 8.995503372,
+
+    logits = [[-1*6.996502623 + 2*3.998001499 + 0.3],
+              [-1*12.993504871 + 2*8.995503372 + 0.3]]
+           = [[1.299500375],[5.297501873]]
+    """
+    base_global_step = 100
+    create_checkpoint(
+        (
+            ([[.6, .5]], [1., -1.]),
+            ([[-1.], [2.]], [.3]),
+        ),
+        base_global_step,
+        self._model_dir,
+        batch_norm_vars=(
+            [
+                [0, 0],  # beta.
+                [1, 1],  # gamma.
+                [0, 0],  # moving mean.
+                [1, 1],  # moving variance.
+            ],))
+    self._test_logits(
+        ModeKeys.TRAIN,
+        hidden_units=[2],
+        logits_dimension=1,
+        inputs=[[10.], [20.]],
+        expected_logits=[[-0.699895571], [1.299895571]],
+        batch_norm=True)
+    for mode in [ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=[2],
+          logits_dimension=1,
+          inputs=[[10.], [20.]],
+          expected_logits=[[1.299500375], [5.297501873]],
+          batch_norm=True)
+
+  def test_multi_dim_logits(self):
+    """Tests multi-dimensional logits.
+
+    input_layer = [[10]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)]] = [[6.1, 4.9]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)]]
+                   = [[relu(2.38), relu(-0.22)]] = [[2.38, 0]]
+    logits = [[-1*2.38 +0.3, 1*2.38 -0.3, 0.5*2.38]]
+           = [[-2.08, 2.08, 1.19]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10.]],
+          expected_logits=[[-2.08, 2.08, 1.19]])
+
+  def test_multi_example_multi_dim_logits(self):
+    """Tests multiple examples and multi-dimensional logits.
+
+    input_layer = [[10], [5]]
+    hidden_layer_0 = [[relu(0.6*10 +0.1), relu(0.5*10 -0.1)],
+                      [relu(0.6*5 +0.1), relu(0.5*5 -0.1)]]
+                   = [[6.1, 4.9], [3.1, 2.4]]
+    hidden_layer_1 = [[relu(1*6.1 -0.8*4.9 +0.2), relu(0.8*6.1 -1*4.9 -0.2)],
+                      [relu(1*3.1 -0.8*2.4 +0.2), relu(0.8*3.1 -1*2.4 -0.2)]]
+                   = [[2.38, 0], [1.38, 0]]
+    logits = [[-1*2.38 +0.3, 1*2.38 -0.3, 0.5*2.38],
+              [-1*1.38 +0.3, 1*1.38 -0.3, 0.5*1.38]]
+           = [[-2.08, 2.08, 1.19], [-1.08, 1.08, 0.69]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10.], [5.]],
+          expected_logits=[[-2.08, 2.08, 1.19], [-1.08, 1.08, .69]])
+
+  def test_multi_dim_input_one_dim_logits(self):
+    """Tests multi-dimensional inputs and one-dimensional logits.
+
+    input_layer = [[10, 8]]
+    hidden_layer_0 = [[relu(0.6*10 -0.6*8 +0.1), relu(0.5*10 -0.5*8 -0.1)]]
+                   = [[1.3, 0.9]]
+    hidden_layer_1 = [[relu(1*1.3 -0.8*0.9 + 0.2), relu(0.8*1.3 -1*0.9 -0.2)]]
+                   = [[0.78, relu(-0.06)]] = [[0.78, 0]]
+    logits = [[-1*0.78 +1*0 +0.3]] = [[-0.48]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=1,
+          inputs=[[10., 8.]],
+          expected_logits=[[-0.48]])
+
+  def test_multi_dim_input_multi_dim_logits(self):
+    """Tests multi-dimensional inputs and multi-dimensional logits.
+
+    input_layer = [[10, 8]]
+    hidden_layer_0 = [[relu(0.6*10 -0.6*8 +0.1), relu(0.5*10 -0.5*8 -0.1)]]
+                   = [[1.3, 0.9]]
+    hidden_layer_1 = [[relu(1*1.3 -0.8*0.9 + 0.2), relu(0.8*1.3 -1*0.9 -0.2)]]
+                   = [[0.78, relu(-0.06)]] = [[0.78, 0]]
+    logits = [[-1*0.78 + 0.3, 1*0.78 -0.3, 0.5*0.78]] = [[-0.48, 0.48, 0.39]]
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      self._test_logits(
+          mode,
+          hidden_units=(2, 2),
+          logits_dimension=3,
+          inputs=[[10., 8.]],
+          expected_logits=[[-0.48, 0.48, 0.39]])
+
+  def test_multi_feature_column_multi_dim_logits(self):
+    """Tests multiple feature columns and multi-dimensional logits.
+
+    All numbers are the same as test_multi_dim_input_multi_dim_logits. The only
+    difference is that the input consists of two 1D feature columns, instead of
+    one 2D feature column.
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    hidden_units = (2, 2)
+    logits_dimension = 3
+    inputs = ([[10.]], [[8.]])
+    expected_logits = [[-0.48, 0.48, 0.39]]
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      with tf.Graph().as_default():
+        # Global step needed for MonitoredSession, which is in turn used to
+        # explicitly set variable weights through a checkpoint.
+        tf.compat.v1.train.create_global_step()
+        # Use a variable scope here with 'dnn', emulating the dnn model_fn, so
+        # the checkpoint naming is shared.
+        with tf.compat.v1.variable_scope('dnn'):
+          input_layer_partitioner = (
+              tf.compat.v1.min_max_variable_partitioner(
+                  max_partitions=0, min_slice_size=64 << 20))
+          logit_fn = self._dnn_logit_fn_builder(
+              units=logits_dimension,
+              hidden_units=hidden_units,
+              feature_columns=[
+                  self._fc_impl.numeric_column('age'),
+                  self._fc_impl.numeric_column('height')
+              ],
+              activation_fn=tf.nn.relu,
+              dropout=None,
+              input_layer_partitioner=input_layer_partitioner,
+              batch_norm=False)
+          logits = logit_fn(
+              features={
+                  'age': tf.constant(inputs[0]),
+                  'height': tf.constant(inputs[1])
+              },
+              mode=mode)
+          with tf.compat.v1.train.MonitoredTrainingSession(
+              checkpoint_dir=self._model_dir) as sess:
+            self.assertAllClose(expected_logits, sess.run(logits))
+
+  def test_multi_feature_column_mix_multi_dim_logits(self):
+    """Tests multiple feature columns and multi-dimensional logits.
+
+    All numbers are the same as test_multi_dim_input_multi_dim_logits. The
+    difference is that the input consists of two 1D feature columns built
+    with mixed feature-column implementations, instead of one 2D feature
+    column.
+    """
+    base_global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    hidden_units = (2, 2)
+    logits_dimension = 3
+    inputs = ([[10.]], [[8.]])
+    expected_logits = [[-0.48, 0.48, 0.39]]
+
+    for mode in [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]:
+      with tf.Graph().as_default():
+        # Global step needed for MonitoredSession, which is in turn used to
+        # explicitly set variable weights through a checkpoint.
+        tf.compat.v1.train.create_global_step()
+        # Use a variable scope here with 'dnn', emulating the dnn model_fn, so
+        # the checkpoint naming is shared.
+        with tf.compat.v1.variable_scope('dnn'):
+          input_layer_partitioner = (
+              tf.compat.v1.min_max_variable_partitioner(
+                  max_partitions=0, min_slice_size=64 << 20))
+          logit_fn = self._dnn_logit_fn_builder(
+              units=logits_dimension,
+              hidden_units=hidden_units,
+              feature_columns=[
+                  feature_column.numeric_column('age'),
+                  tf.feature_column.numeric_column('height')
+              ],
+              activation_fn=tf.nn.relu,
+              dropout=None,
+              input_layer_partitioner=input_layer_partitioner,
+              batch_norm=False)
+          logits = logit_fn(
+              features={
+                  'age': tf.constant(inputs[0]),
+                  'height': tf.constant(inputs[1])
+              },
+              mode=mode)
+          with tf.compat.v1.train.MonitoredTrainingSession(
+              checkpoint_dir=self._model_dir) as sess:
+            self.assertAllClose(expected_logits, sess.run(logits))
+
+
+class BaseDNNWarmStartingTest(object):
+
+  def __init__(self,
+               _dnn_classifier_fn,
+               _dnn_regressor_fn,
+               fc_impl=feature_column):
+    self._dnn_classifier_fn = _dnn_classifier_fn
+    self._dnn_regressor_fn = _dnn_regressor_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    # Create a directory to save our old checkpoint and vocabularies to.
+    self._ckpt_and_vocab_dir = tempfile.mkdtemp()
+
+    # Make a dummy input_fn.
+    def _input_fn():
+      features = {
+          'city': [['Palo Alto'], ['Mountain View']],
+          'locality': [['Palo Alto'], ['Mountain View']],
+          'occupation': [['doctor'], ['consultant']]
+      }
+      return features, [0, 1]
+
+    self._input_fn = _input_fn
+
+  def tearDown(self):
+    # Clean up checkpoint / vocab dir.
+    tf.compat.v1.summary.FileWriterCache.clear()
+    shutil.rmtree(self._ckpt_and_vocab_dir)
+
+  def assertAllNotClose(self, t1, t2):
+    """Helper assert for arrays."""
+    sum_of_abs_diff = 0.0
+    for x, y in zip(t1, t2):
+      try:
+        for a, b in zip(x, y):
+          sum_of_abs_diff += abs(b - a)
+      except TypeError:
+        sum_of_abs_diff += abs(y - x)
+    self.assertGreater(sum_of_abs_diff, 0)
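+
+  # Editorial note: the tests below warm-start a second estimator from the
+  # first's checkpoint and train one step with a zero learning rate, so any
+  # successfully warm-started variable must compare equal across the two.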
+
+  def test_classifier_basic_warm_starting(self):
+    """Tests correctness of DNNClassifier default warm-start."""
+    city = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_list(
+            'city', vocabulary_list=['Mountain View', 'Palo Alto']),
+        dimension=5)
+
+    # Create a DNNClassifier and train to save a checkpoint.
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second DNNClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        n_classes=4,
+        optimizer=tf.compat.v1.train.GradientDescentOptimizer(
+            learning_rate=0.0),
+        warm_start_from=dnn_classifier.model_dir)
+
+    warm_started_dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_dnn_classifier.get_variable_names():
+      self.assertAllClose(
+          dnn_classifier.get_variable_value(variable_name),
+          warm_started_dnn_classifier.get_variable_value(variable_name))
+
+  def test_regressor_basic_warm_starting(self):
+    """Tests correctness of DNNRegressor default warm-start."""
+    city = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_list(
+            'city', vocabulary_list=['Mountain View', 'Palo Alto']),
+        dimension=5)
+
+    # Create a DNNRegressor and train to save a checkpoint.
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        model_dir=self._ckpt_and_vocab_dir,
+        optimizer='SGD')
+    dnn_regressor.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second DNNRegressor, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        optimizer=tf.compat.v1.train.GradientDescentOptimizer(
+            learning_rate=0.0),
+        warm_start_from=dnn_regressor.model_dir)
+
+    warm_started_dnn_regressor.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_dnn_regressor.get_variable_names():
+      self.assertAllClose(
+          dnn_regressor.get_variable_value(variable_name),
+          warm_started_dnn_regressor.get_variable_value(variable_name))
+
+  def test_warm_starting_selective_variables(self):
+    """Tests selecting variables to warm-start."""
+    city = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_list(
+            'city', vocabulary_list=['Mountain View', 'Palo Alto']),
+        dimension=5)
+
+    # Create a DNNClassifier and train to save a checkpoint.
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second DNNClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        n_classes=4,
+        optimizer=tf.compat.v1.train.GradientDescentOptimizer(
+            learning_rate=0.0),
+        # The provided regular expression will only warm-start the city
+        # embedding, not the kernels and biases of the hidden layers.
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=dnn_classifier.model_dir,
+            vars_to_warm_start='.*(city).*'))
+
+    warm_started_dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_dnn_classifier.get_variable_names():
+      if 'city' in variable_name:
+        self.assertAllClose(
+            dnn_classifier.get_variable_value(variable_name),
+            warm_started_dnn_classifier.get_variable_value(variable_name))
+      elif 'bias' in variable_name:
+        # Hidden layer biases are zero-initialized.
+        bias_values = warm_started_dnn_classifier.get_variable_value(
+            variable_name)
+        self.assertAllClose(np.zeros_like(bias_values), bias_values)
+      elif 'kernel' in variable_name:
+        # We can't override the glorot uniform initializer used for the kernels
+        # in the dense layers, so just make sure we're not getting the same
+        # values from the old checkpoint.
+        self.assertAllNotClose(
+            dnn_classifier.get_variable_value(variable_name),
+            warm_started_dnn_classifier.get_variable_value(variable_name))
+
+  def test_warm_starting_with_vocab_remapping_and_partitioning(self):
+    """Tests warm-starting with vocab remapping and partitioning."""
+    vocab_list = ['doctor', 'lawyer', 'consultant']
+    vocab_file = os.path.join(self._ckpt_and_vocab_dir, 'occupation_vocab')
+    with open(vocab_file, 'w') as f:
+      f.write('\n'.join(vocab_list))
+    occupation = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_file(
+            'occupation',
+            vocabulary_file=vocab_file,
+            vocabulary_size=len(vocab_list)),
+        dimension=2)
+
+    # Create a DNNClassifier and train to save a checkpoint.
+    partitioner = tf.compat.v1.fixed_size_partitioner(num_shards=2)
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[occupation],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD',
+        input_layer_partitioner=partitioner)
+    dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second DNNClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).  Use a new FeatureColumn with a
+    # different vocabulary for occupation.
+    new_vocab_list = ['doctor', 'consultant', 'engineer']
+    new_vocab_file = os.path.join(self._ckpt_and_vocab_dir,
+                                  'new_occupation_vocab')
+    with open(new_vocab_file, 'w') as f:
+      f.write('\n'.join(new_vocab_list))
+    new_occupation = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_file(
+            'occupation',
+            vocabulary_file=new_vocab_file,
+            vocabulary_size=len(new_vocab_list)),
+        dimension=2)
+    # We can create our VocabInfo object from the new and old occupation
+    # FeatureColumn's.
+    occupation_vocab_info = estimator.VocabInfo(
+        new_vocab=new_occupation.categorical_column.vocabulary_file,
+        new_vocab_size=new_occupation.categorical_column.vocabulary_size,
+        num_oov_buckets=new_occupation.categorical_column.num_oov_buckets,
+        old_vocab=occupation.categorical_column.vocabulary_file,
+        old_vocab_size=occupation.categorical_column.vocabulary_size,
+        # Can't use constant_initializer with load_and_remap, so a
+        # random_uniform with minval == maxval emulates one here. In
+        # practice, use a truncated normal initializer.
+        backup_initializer=tf.compat.v1.initializers.random_uniform(
+            minval=0.39, maxval=0.39))
+    warm_started_dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[occupation],
+        n_classes=4,
+        optimizer=tf.compat.v1.train.GradientDescentOptimizer(
+            learning_rate=0.0),
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=dnn_classifier.model_dir,
+            var_name_to_vocab_info={
+                OCCUPATION_EMBEDDING_NAME: occupation_vocab_info
+            },
+            # Explicitly providing None here will only warm-start variables
+            # referenced in var_name_to_vocab_info (no hidden weights will be
+            # warm-started).
+            vars_to_warm_start=None),
+        input_layer_partitioner=partitioner)
+
+    warm_started_dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+    # 'doctor' was ID-0 and still ID-0.
+    self.assertAllClose(
+        dnn_classifier.get_variable_value(OCCUPATION_EMBEDDING_NAME)[0, :],
+        warm_started_dnn_classifier.get_variable_value(
+            OCCUPATION_EMBEDDING_NAME)[0, :])
+    # 'consultant' was ID-2 and now ID-1.
+    self.assertAllClose(
+        dnn_classifier.get_variable_value(OCCUPATION_EMBEDDING_NAME)[2, :],
+        warm_started_dnn_classifier.get_variable_value(
+            OCCUPATION_EMBEDDING_NAME)[1, :])
+    # 'engineer' is a new entry and should be initialized with the
+    # backup_initializer in VocabInfo.
+    self.assertAllClose([0.39] * 2,
+                        warm_started_dnn_classifier.get_variable_value(
+                            OCCUPATION_EMBEDDING_NAME)[2, :])
+    for variable_name in warm_started_dnn_classifier.get_variable_names():
+      if 'bias' in variable_name:
+        # Hidden layer biases are zero-initialized.
+        bias_values = warm_started_dnn_classifier.get_variable_value(
+            variable_name)
+        self.assertAllClose(np.zeros_like(bias_values), bias_values)
+      elif 'kernel' in variable_name:
+        # We can't override the glorot uniform initializer used for the kernels
+        # in the dense layers, so just make sure we're not getting the same
+        # values from the old checkpoint.
+        self.assertAllNotClose(
+            dnn_classifier.get_variable_value(variable_name),
+            warm_started_dnn_classifier.get_variable_value(variable_name))
+
+  def test_warm_starting_with_naming_change(self):
+    """Tests warm-starting with a Tensor name remapping."""
+    locality = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_list(
+            'locality', vocabulary_list=['Mountain View', 'Palo Alto']),
+        dimension=5)
+
+    # Create a DNNClassifier and train to save a checkpoint.
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[locality],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second DNNClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    city = self._fc_impl.embedding_column(
+        self._fc_impl.categorical_column_with_vocabulary_list(
+            'city', vocabulary_list=['Mountain View', 'Palo Alto']),
+        dimension=5)
+    warm_started_dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=[256, 128],
+        feature_columns=[city],
+        n_classes=4,
+        optimizer=tf.compat.v1.train.GradientDescentOptimizer(
+            learning_rate=0.0),
+        # The 'city' variable corresponds to the 'locality' variable in the
+        # previous model.
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=dnn_classifier.model_dir,
+            var_name_to_prev_var_name={
+                CITY_EMBEDDING_NAME:
+                    CITY_EMBEDDING_NAME.replace('city', 'locality')
+            }))
+
+    warm_started_dnn_classifier.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_dnn_classifier.get_variable_names():
+      if 'city' in variable_name:
+        self.assertAllClose(
+            dnn_classifier.get_variable_value(
+                CITY_EMBEDDING_NAME.replace('city', 'locality')),
+            warm_started_dnn_classifier.get_variable_value(CITY_EMBEDDING_NAME))
+      else:
+        self.assertAllClose(
+            dnn_classifier.get_variable_value(variable_name),
+            warm_started_dnn_classifier.get_variable_value(variable_name))
+
+
+class BaseDNNClassifierEvaluateTest(object):
+
+  def __init__(self, dnn_classifier_fn, fc_impl=feature_column):
+    self._dnn_classifier_fn = dnn_classifier_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_one_dim(self):
+    """Asserts evaluation metrics for one-dimensional input and logits."""
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), global_step, self._model_dir)
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age')],
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      # batch_size = 2, one false label, and one true.
+      return {'age': [[10.], [10.]]}, [[1], [0]]
+
+    # Uses identical numbers as DNNModelFnTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-2.08], [-2.08]] =>
+    # logistic = 1/(1 + exp(-logits)) = [[0.11105597], [0.11105597]]
+    # loss = -1. * log(0.111) -1. * log(0.889) = 2.31544200
+    expected_loss = 2.31544200
+    self.assertAllClose(
+        {
+            metric_keys.MetricKeys.LOSS: expected_loss,
+            metric_keys.MetricKeys.LOSS_MEAN: expected_loss / 2.,
+            metric_keys.MetricKeys.ACCURACY: 0.5,
+            metric_keys.MetricKeys.PRECISION: 0.0,
+            metric_keys.MetricKeys.RECALL: 0.0,
+            metric_keys.MetricKeys.PREDICTION_MEAN: 0.11105597,
+            metric_keys.MetricKeys.LABEL_MEAN: 0.5,
+            metric_keys.MetricKeys.ACCURACY_BASELINE: 0.5,
+            # There is no good way to calculate AUC for only two data points.
+            # But that is what the algorithm returns.
+            metric_keys.MetricKeys.AUC: 0.5,
+            metric_keys.MetricKeys.AUC_PR: 0.75,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: global_step
+        },
+        dnn_classifier.evaluate(input_fn=_input_fn, steps=1))
+
+  def test_multi_dim(self):
+    """Asserts evaluation metrics for multi-dimensional input and logits."""
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), global_step, self._model_dir)
+    n_classes = 3
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age', shape=[2])],
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      # batch_size = 2, labels are classes 1 and 0.
+      return {'age': [[10., 8.], [10., 8.]]}, [[1], [0]]
+
+    # Uses identical numbers as
+    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-0.48, 0.48, 0.39], [-0.48, 0.48, 0.39]]
+    # probabilities = exp(logits)/sum(exp(logits))
+    #               = [[0.16670536, 0.43538380, 0.39791084],
+    #                  [0.16670536, 0.43538380, 0.39791084]]
+    # loss = -log(0.43538380) - log(0.16670536)
+    expected_loss = 2.62305466
+    self.assertAllClose(
+        {
+            metric_keys.MetricKeys.LOSS: expected_loss,
+            metric_keys.MetricKeys.LOSS_MEAN: expected_loss / 2,
+            metric_keys.MetricKeys.ACCURACY: 0.5,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: global_step
+        }, dnn_classifier.evaluate(input_fn=_input_fn, steps=1))
+
+  def test_float_labels(self):
+    """Asserts evaluation metrics for float labels in binary classification."""
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), global_step, self._model_dir)
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age')],
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      # batch_size = 2, with float labels of 0.8 and 0.4.
+      return {'age': [[10.], [10.]]}, [[0.8], [0.4]]
+
+    # Uses identical numbers as DNNModelFnTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-2.08], [-2.08]] =>
+    # logistic = 1/(1 + exp(-logits)) = [[0.11105597], [0.11105597]]
+    # loss = -0.8 * log(0.111) -0.2 * log(0.889)
+    #        -0.4 * log(0.111) -0.6 * log(0.889) = 2.7314420
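+    # A quick cross-check of the float-label cross-entropy above (a sketch;
+    # the underscore names are local helpers):
+    _p = 1. / (1. + np.exp(2.08))  # sigmoid(-2.08)
+    _loss = (-0.8 * np.log(_p) - 0.2 * np.log(1. - _p) - 0.4 * np.log(_p) -
+             0.6 * np.log(1. - _p))
+    assert abs(_loss - 2.7314420) < 1e-4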
+    metrics = dnn_classifier.evaluate(input_fn=_input_fn, steps=1)
+    self.assertAlmostEqual(2.7314420, metrics[metric_keys.MetricKeys.LOSS])
+
+  def test_multi_dim_weights(self):
+    """Tests evaluation with weights."""
+    # Uses the same checkpoint as test_multi_dim.
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), global_step, self._model_dir)
+    n_classes = 3
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age', shape=[2])],
+        n_classes=n_classes,
+        weight_column='w',
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      # batch_size = 2, one false label, and one true.
+      return {'age': [[10., 8.], [10., 8.]], 'w': [[10.], [100.]]}, [[1], [0]]
+
+    # Uses the same numbers as test_multi_dim.
+    # See that test for calculation of logits.
+    # loss = -log(0.43538380)*10 - log(0.16670536)*100
+    expected_loss = 187.468007
+    metrics = dnn_classifier.evaluate(input_fn=_input_fn, steps=1)
+    self.assertAlmostEqual(
+        expected_loss, metrics[metric_keys.MetricKeys.LOSS], places=3)
+
+
+class BaseDNNRegressorEvaluateTest(object):
+
+  def __init__(self, dnn_regressor_fn, fc_impl=feature_column):
+    self._dnn_regressor_fn = dnn_regressor_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_one_dim(self):
+    """Asserts evaluation metrics for one-dimensional input and logits."""
+    # Create checkpoint: num_inputs=1, hidden_units=(2, 2), num_outputs=1.
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), global_step, self._model_dir)
+
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age')],
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      return {'age': [[10.]]}, [[1.]]
+
+    # Uses the same numbers as DNNModelTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-2.08]] => predictions = [-2.08].
+    # loss = (1+2.08)^2 = 9.4864
+    expected_loss = 9.4864
+    self.assertAllClose(
+        {
+            metric_keys.MetricKeys.LOSS: expected_loss,
+            metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+            metric_keys.MetricKeys.PREDICTION_MEAN: -2.08,
+            metric_keys.MetricKeys.LABEL_MEAN: 1.0,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: global_step
+        }, dnn_regressor.evaluate(input_fn=_input_fn, steps=1))
+
+  def test_multi_dim(self):
+    """Asserts evaluation metrics for multi-dimensional input and logits."""
+    # Create checkpoint: num_inputs=2, hidden_units=(2, 2), num_outputs=3.
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), global_step, self._model_dir)
+    label_dimension = 3
+
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age', shape=[2])],
+        label_dimension=label_dimension,
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      return {'age': [[10., 8.]]}, [[1., -1., 0.5]]
+
+    # Uses the same numbers as
+    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-0.48, 0.48, 0.39]]
+    # loss = (1+0.48)^2 + (-1-0.48)^2 + (0.5-0.39)^2 = 4.3929
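+    # A quick cross-check of the sum-of-squares loss above (a sketch):
+    _squared_errors = (1 + 0.48)**2 + (-1 - 0.48)**2 + (0.5 - 0.39)**2
+    assert abs(_squared_errors - 4.3929) < 1e-6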
+    expected_loss = 4.3929
+    self.assertAllClose(
+        {
+            metric_keys.MetricKeys.LOSS: expected_loss,
+            metric_keys.MetricKeys.LOSS_MEAN: expected_loss / label_dimension,
+            metric_keys.MetricKeys.PREDICTION_MEAN: 0.39 / 3.0,
+            metric_keys.MetricKeys.LABEL_MEAN: 0.5 / 3.0,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: global_step
+        }, dnn_regressor.evaluate(input_fn=_input_fn, steps=1))
+
+  def test_multi_dim_weights(self):
+    """Asserts evaluation metrics for multi-dimensional input and logits."""
+    # same checkpoint with test_multi_dim.
+    global_step = 100
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), global_step, self._model_dir)
+    label_dimension = 3
+
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=(2, 2),
+        feature_columns=[self._fc_impl.numeric_column('age', shape=[2])],
+        label_dimension=label_dimension,
+        weight_column='w',
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      return {'age': [[10., 8.]], 'w': [10.]}, [[1., -1., 0.5]]
+
+    # Uses the same numbers as test_multi_dim.
+    # See that test for calculation of logits.
+    # loss = 4.3929*10
+    expected_loss = 43.929
+    metrics = dnn_regressor.evaluate(input_fn=_input_fn, steps=1)
+    self.assertAlmostEqual(
+        expected_loss, metrics[metric_keys.MetricKeys.LOSS], places=3)
+
+
+class BaseDNNClassifierPredictTest(object):
+
+  def __init__(self, dnn_classifier_fn, fc_impl=feature_column):
+    self._dnn_classifier_fn = dnn_classifier_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def _test_one_dim(self, label_vocabulary, label_output_fn):
+    """Asserts predictions for one-dimensional input and logits."""
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), global_step=0, model_dir=self._model_dir)
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        label_vocabulary=label_vocabulary,
+        feature_columns=(self._fc_impl.numeric_column('x'),),
+        model_dir=self._model_dir)
+    input_fn = numpy_io.numpy_input_fn(
+        x={'x': np.array([[10.]])}, batch_size=1, shuffle=False)
+    # Uses the same numbers as DNNModelTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-2.08] =>
+    # logistic = exp(-2.08)/(1 + exp(-2.08)) = 0.11105597
+    # probabilities = [1-logistic, logistic] = [0.88894403, 0.11105597]
+    # class_ids = argmax(probabilities) = [0]
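+    # A quick numeric cross-check of the chain above (a sketch; the
+    # underscore names are local helpers, not estimator API):
+    _logistic = 1. / (1. + np.exp(2.08))  # sigmoid(-2.08) ~= 0.11105597
+    _probabilities = np.array([1. - _logistic, _logistic])
+    assert int(np.argmax(_probabilities)) == 0  # predicted class_id is 0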
+    predictions = next(dnn_classifier.predict(input_fn=input_fn))
+    self.assertAllClose([-2.08],
+                        predictions[prediction_keys.PredictionKeys.LOGITS])
+    self.assertAllClose([0.11105597],
+                        predictions[prediction_keys.PredictionKeys.LOGISTIC])
+    self.assertAllClose(
+        [0.88894403, 0.11105597],
+        predictions[prediction_keys.PredictionKeys.PROBABILITIES])
+    self.assertAllClose([0],
+                        predictions[prediction_keys.PredictionKeys.CLASS_IDS])
+    self.assertAllEqual([label_output_fn(0)],
+                        predictions[prediction_keys.PredictionKeys.CLASSES])
+
+  def test_one_dim_without_label_vocabulary(self):
+    self._test_one_dim(
+        label_vocabulary=None, label_output_fn=lambda x: ('%s' % x).encode())
+
+  def test_one_dim_with_label_vocabulary(self):
+    n_classes = 2
+    self._test_one_dim(
+        label_vocabulary=['class_vocab_{}'.format(i) for i in range(n_classes)],
+        label_output_fn=lambda x: ('class_vocab_%s' % x).encode())
+
+  def _test_multi_dim_with_3_classes(self, label_vocabulary, label_output_fn):
+    """Asserts predictions for multi-dimensional input and logits."""
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), global_step=0, model_dir=self._model_dir)
+
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=(2, 2),
+        feature_columns=(self._fc_impl.numeric_column('x', shape=(2,)),),
+        label_vocabulary=label_vocabulary,
+        n_classes=3,
+        model_dir=self._model_dir)
+    input_fn = numpy_io.numpy_input_fn(
+        # Inputs shape is (batch_size, num_inputs).
+        x={'x': np.array([[10., 8.]])},
+        batch_size=1,
+        shuffle=False)
+    # Uses the same numbers as
+    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-0.48, 0.48, 0.39] =>
+    # probabilities[i] = exp(logits[i]) / sum_j exp(logits[j]) =>
+    # probabilities = [0.16670536, 0.43538380, 0.39791084]
+    # class_ids = argmax(probabilities) = [1]
+    predictions = next(dnn_classifier.predict(input_fn=input_fn))
+    self.assertItemsEqual([
+        prediction_keys.PredictionKeys.LOGITS,
+        prediction_keys.PredictionKeys.PROBABILITIES,
+        prediction_keys.PredictionKeys.CLASS_IDS,
+        prediction_keys.PredictionKeys.CLASSES,
+        prediction_keys.PredictionKeys.ALL_CLASS_IDS,
+        prediction_keys.PredictionKeys.ALL_CLASSES
+    ], six.iterkeys(predictions))
+    self.assertAllClose([-0.48, 0.48, 0.39],
+                        predictions[prediction_keys.PredictionKeys.LOGITS])
+    self.assertAllClose(
+        [0.16670536, 0.43538380, 0.39791084],
+        predictions[prediction_keys.PredictionKeys.PROBABILITIES])
+    self.assertAllEqual([1],
+                        predictions[prediction_keys.PredictionKeys.CLASS_IDS])
+    self.assertAllEqual([label_output_fn(1)],
+                        predictions[prediction_keys.PredictionKeys.CLASSES])
+
+  def test_multi_dim_with_3_classes_but_no_label_vocab(self):
+    self._test_multi_dim_with_3_classes(
+        label_vocabulary=None, label_output_fn=lambda x: ('%s' % x).encode())
+
+  def test_multi_dim_with_3_classes_and_label_vocab(self):
+    n_classes = 3
+    self._test_multi_dim_with_3_classes(
+        label_vocabulary=['class_vocab_{}'.format(i) for i in range(n_classes)],
+        label_output_fn=lambda x: ('class_vocab_%s' % x).encode())
+
+
+class BaseDNNRegressorPredictTest(object):
+
+  def __init__(self, dnn_regressor_fn, fc_impl=feature_column):
+    self._dnn_regressor_fn = dnn_regressor_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_one_dim(self):
+    """Asserts predictions for one-dimensional input and logits."""
+    # Create checkpoint: num_inputs=1, hidden_units=(2, 2), num_outputs=1.
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), global_step=0, model_dir=self._model_dir)
+
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=(2, 2),
+        feature_columns=(self._fc_impl.numeric_column('x'),),
+        model_dir=self._model_dir)
+    input_fn = numpy_io.numpy_input_fn(
+        x={'x': np.array([[10.]])}, batch_size=1, shuffle=False)
+    # Uses the same numbers as DNNModelTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-2.08]] => predictions = [-2.08].
+    self.assertAllClose({
+        prediction_keys.PredictionKeys.PREDICTIONS: [-2.08],
+    }, next(dnn_regressor.predict(input_fn=input_fn)))
+
+  def test_multi_dim(self):
+    """Asserts predictions for multi-dimensional input and logits."""
+    # Create checkpoint: num_inputs=2, hidden_units=(2, 2), num_outputs=3.
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), 100, self._model_dir)
+
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=(2, 2),
+        feature_columns=(self._fc_impl.numeric_column('x', shape=(2,)),),
+        label_dimension=3,
+        model_dir=self._model_dir)
+    input_fn = numpy_io.numpy_input_fn(
+        # Inputs shape is (batch_size, num_inputs).
+        x={'x': np.array([[10., 8.]])},
+        batch_size=1,
+        shuffle=False)
+    # Uses the same numbers as
+    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-0.48, 0.48, 0.39]] => predictions = [-0.48, 0.48, 0.39]
+    self.assertAllClose(
+        {
+            prediction_keys.PredictionKeys.PREDICTIONS: [-0.48, 0.48, 0.39],
+        }, next(dnn_regressor.predict(input_fn=input_fn)))
+
+
+class _SummaryHook(tf.compat.v1.train.SessionRunHook):
+  """Saves summaries every N steps."""
+
+  def __init__(self):
+    self._summaries = []
+
+  def begin(self):
+    self._summary_op = tf.compat.v1.summary.merge_all()
+
+  def before_run(self, run_context):
+    return tf.compat.v1.train.SessionRunArgs({'summary': self._summary_op})
+
+  def after_run(self, run_context, run_values):
+    s = summary_pb2.Summary()
+    s.ParseFromString(run_values.results['summary'])
+    self._summaries.append(s)
+
+  def summaries(self):
+    return tuple(self._summaries)
+
+
+def _assert_checkpoint(testcase, global_step, input_units, hidden_units,
+                       output_units, model_dir):
+  """Asserts checkpoint contains expected variables with proper shapes.
+
+  Args:
+    testcase: A TestCase instance.
+    global_step: Expected global step value.
+    input_units: The dimension of the input layer.
+    hidden_units: Iterable of integer sizes for the hidden layers.
+    output_units: The dimension of the output layer (logits).
+    model_dir: The model directory.
+  """
+  shapes = {name: shape for (name, shape) in tf.train.list_variables(model_dir)}
+
+  # Global step.
+  testcase.assertEqual([], shapes[tf.compat.v1.GraphKeys.GLOBAL_STEP])
+  testcase.assertEqual(
+      global_step,
+      tf.train.load_variable(model_dir, tf.compat.v1.GraphKeys.GLOBAL_STEP))
+
+  # Hidden layer weights.
+  prev_layer_units = input_units
+  for i, layer_units in enumerate(hidden_units):
+    testcase.assertAllEqual((prev_layer_units, layer_units),
+                            shapes[HIDDEN_WEIGHTS_NAME_PATTERN % i])
+    testcase.assertAllEqual((layer_units,),
+                            shapes[HIDDEN_BIASES_NAME_PATTERN % i])
+    prev_layer_units = layer_units
+
+  # Output layer weights.
+  testcase.assertAllEqual((prev_layer_units, output_units),
+                          shapes[LOGITS_WEIGHTS_NAME])
+  testcase.assertAllEqual((output_units,), shapes[LOGITS_BIASES_NAME])
+
+
+def _assert_simple_summary(testcase, expected_values, actual_summary):
+  """Assert summary the specified simple values.
+
+  Args:
+    testcase: A TestCase instance.
+    expected_values: Dict of expected tags and simple values.
+    actual_summary: `summary_pb2.Summary`.
+  """
+  testcase.assertAllClose(
+      expected_values, {
+          v.tag: v.simple_value
+          for v in actual_summary.value
+          if (v.tag in expected_values)
+      })
+
+
+class BaseDNNClassifierTrainTest(object):
+
+  def __init__(self, dnn_classifier_fn, fc_impl=feature_column):
+    self._dnn_classifier_fn = dnn_classifier_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_from_scratch_with_default_optimizer_binary(self):
+    hidden_units = (2, 2)
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate final checkpoint.
+    num_steps = 5
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[1]]), steps=num_steps)
+    _assert_checkpoint(
+        self,
+        num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+
+  def test_from_scratch_with_default_optimizer_multi_class(self):
+    hidden_units = (2, 2)
+    n_classes = 3
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate final checkpoint.
+    num_steps = 5
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[2]]), steps=num_steps)
+    _assert_checkpoint(
+        self,
+        num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=n_classes,
+        model_dir=self._model_dir)
+
+  def test_from_scratch_validate_summary(self):
+    hidden_units = (2, 2)
+    opt = mock_optimizer(self, hidden_units=hidden_units)
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+    self.assertEqual(0, opt.minimize.call_count)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[1]]),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(1, opt.minimize.call_count)
+    _assert_checkpoint(
+        self,
+        num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      summary_keys = [v.tag for v in summary.value]
+      self.assertIn(metric_keys.MetricKeys.LOSS, summary_keys)
+      self.assertIn(metric_keys.MetricKeys.LOSS_MEAN, summary_keys)
+
+  def test_binary_classification(self):
+    base_global_step = 100
+    hidden_units = (2, 2)
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    # Uses the same numbers as DNNModelFnTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-2.08] => probabilities = [0.889, 0.111]
+    # loss = -1. * log(0.111) = 2.19772100
+    expected_loss = 2.19772100
+    opt = mock_optimizer(
+        self, hidden_units=hidden_units, expected_loss=expected_loss)
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+    self.assertEqual(0, opt.minimize.call_count)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[1]]),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(1, opt.minimize.call_count)
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      _assert_simple_summary(
+          self, {
+              metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+              'dnn/dnn/hiddenlayer_0/fraction_of_zero_values': 0.,
+              'dnn/dnn/hiddenlayer_1/fraction_of_zero_values': .5,
+              'dnn/dnn/logits/fraction_of_zero_values': 0.,
+              metric_keys.MetricKeys.LOSS: expected_loss,
+          }, summary)
+    _assert_checkpoint(
+        self,
+        base_global_step + num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+
+  def test_binary_classification_float_labels(self):
+    base_global_step = 100
+    hidden_units = (2, 2)
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    # Uses the same numbers as DNNModelFnTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-2.08] => probabilities = [0.889, 0.111]
+    # loss = -0.8 * log(0.111) -0.2 * log(0.889) = 1.7817210
+    expected_loss = 1.7817210
+    opt = mock_optimizer(
+        self, hidden_units=hidden_units, expected_loss=expected_loss)
+    dnn_classifier = self._dnn_classifier_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+    self.assertEqual(0, opt.minimize.call_count)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[0.8]]), steps=num_steps)
+    self.assertEqual(1, opt.minimize.call_count)
+
+  def test_multi_class(self):
+    n_classes = 3
+    base_global_step = 100
+    hidden_units = (2, 2)
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+
+    # Uses the same numbers as DNNModelFnTest.test_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-2.08, 2.08, 1.19] => probabilities = [0.0109, 0.7011, 0.2879]
+    # loss = -1. * log(0.7011) = 0.35505795
+    expected_loss = 0.35505795
+    opt = mock_optimizer(
+        self, hidden_units=hidden_units, expected_loss=expected_loss)
+    dnn_classifier = self._dnn_classifier_fn(
+        n_classes=n_classes,
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+    self.assertEqual(0, opt.minimize.call_count)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_classifier.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[1]]),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(1, opt.minimize.call_count)
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      _assert_simple_summary(
+          self, {
+              metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+              'dnn/dnn/hiddenlayer_0/fraction_of_zero_values': 0.,
+              'dnn/dnn/hiddenlayer_1/fraction_of_zero_values': .5,
+              'dnn/dnn/logits/fraction_of_zero_values': 0.,
+              metric_keys.MetricKeys.LOSS: expected_loss,
+          }, summary)
+    _assert_checkpoint(
+        self,
+        base_global_step + num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=n_classes,
+        model_dir=self._model_dir)
+
+
+class BaseDNNRegressorTrainTest(object):
+
+  def __init__(self, dnn_regressor_fn, fc_impl=feature_column):
+    self._dnn_regressor_fn = dnn_regressor_fn
+    self._fc_impl = fc_impl
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_from_scratch_with_default_optimizer(self):
+    hidden_units = (2, 2)
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        model_dir=self._model_dir)
+
+    # Train for a few steps, then validate final checkpoint.
+    num_steps = 5
+    dnn_regressor.train(
+        input_fn=lambda: ({
+            'age': ((1,),)
+        }, ((10,),)), steps=num_steps)
+    _assert_checkpoint(
+        self,
+        num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+
+  def test_from_scratch(self):
+    hidden_units = (2, 2)
+    opt = mock_optimizer(self, hidden_units=hidden_units)
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+    self.assertEqual(0, opt.minimize.call_count)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_regressor.train(
+        input_fn=lambda: ({
+            'age': ((1,),)
+        }, ((5.,),)),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(1, opt.minimize.call_count)
+    _assert_checkpoint(
+        self,
+        num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      summary_keys = [v.tag for v in summary.value]
+      self.assertIn(metric_keys.MetricKeys.LOSS, summary_keys)
+      self.assertIn(metric_keys.MetricKeys.LOSS_MEAN, summary_keys)
+
+  def test_one_dim(self):
+    """Asserts train loss for one-dimensional input and logits."""
+    base_global_step = 100
+    hidden_units = (2, 2)
+    create_checkpoint((
+        ([[.6, .5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1.], [1.]], [.3]),
+    ), base_global_step, self._model_dir)
+
+    # Uses the same numbers as DNNModelFnTest.test_one_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [-2.08] => predictions = [-2.08]
+    # loss = (1 + 2.08)^2 = 9.4864
+    expected_loss = 9.4864
+    opt = mock_optimizer(
+        self, hidden_units=hidden_units, expected_loss=expected_loss)
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=hidden_units,
+        feature_columns=(self._fc_impl.numeric_column('age'),),
+        optimizer=opt,
+        model_dir=self._model_dir)
+    self.assertEqual(0, opt.minimize.call_count)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_regressor.train(
+        input_fn=lambda: ({
+            'age': [[10.]]
+        }, [[1.]]),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(1, opt.minimize.call_count)
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      _assert_simple_summary(
+          self, {
+              metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+              'dnn/dnn/hiddenlayer_0/fraction_of_zero_values': 0.,
+              'dnn/dnn/hiddenlayer_1/fraction_of_zero_values': 0.5,
+              'dnn/dnn/logits/fraction_of_zero_values': 0.,
+              metric_keys.MetricKeys.LOSS: expected_loss,
+          }, summary)
+    _assert_checkpoint(
+        self,
+        base_global_step + num_steps,
+        input_units=1,
+        hidden_units=hidden_units,
+        output_units=1,
+        model_dir=self._model_dir)
+
+  def test_multi_dim(self):
+    """Asserts train loss for multi-dimensional input and logits."""
+    base_global_step = 100
+    hidden_units = (2, 2)
+    create_checkpoint((
+        ([[.6, .5], [-.6, -.5]], [.1, -.1]),
+        ([[1., .8], [-.8, -1.]], [.2, -.2]),
+        ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
+    ), base_global_step, self._model_dir)
+    input_dimension = 2
+    label_dimension = 3
+
+    # Uses the same numbers as
+    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
+    # See that test for calculation of logits.
+    # logits = [[-0.48, 0.48, 0.39]]
+    # loss = (1+0.48)^2 + (-1-0.48)^2 + (0.5-0.39)^2 = 4.3929
+    expected_loss = 4.3929
+    opt = mock_optimizer(
+        self, hidden_units=hidden_units, expected_loss=expected_loss)
+    dnn_regressor = self._dnn_regressor_fn(
+        hidden_units=hidden_units,
+        feature_columns=[
+            self._fc_impl.numeric_column('age', shape=[input_dimension])
+        ],
+        label_dimension=label_dimension,
+        optimizer=opt,
+        model_dir=self._model_dir)
+    self.assertEqual(0, opt.minimize.call_count)
+
+    # Train for a few steps, then validate optimizer, summaries, and
+    # checkpoint.
+    num_steps = 5
+    summary_hook = _SummaryHook()
+    dnn_regressor.train(
+        input_fn=lambda: ({
+            'age': [[10., 8.]]
+        }, [[1., -1., 0.5]]),
+        steps=num_steps,
+        hooks=(summary_hook,))
+    self.assertEqual(1, opt.minimize.call_count)
+    summaries = summary_hook.summaries()
+    self.assertEqual(num_steps, len(summaries))
+    for summary in summaries:
+      _assert_simple_summary(
+          self, {
+              metric_keys.MetricKeys.LOSS_MEAN: expected_loss / label_dimension,
+              'dnn/dnn/hiddenlayer_0/fraction_of_zero_values': 0.,
+              'dnn/dnn/hiddenlayer_1/fraction_of_zero_values': 0.5,
+              'dnn/dnn/logits/fraction_of_zero_values': 0.,
+              metric_keys.MetricKeys.LOSS: expected_loss,
+          }, summary)
+    _assert_checkpoint(
+        self,
+        base_global_step + num_steps,
+        input_units=input_dimension,
+        hidden_units=hidden_units,
+        output_units=label_dimension,
+        model_dir=self._model_dir)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/linear_testing_utils_v1.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/linear_testing_utils_v1.py
new file mode 100644
index 00000000..f73ac2d1
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/canned/v1/linear_testing_utils_v1.py
@@ -0,0 +1,2410 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Utils for testing linear estimators."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import math
+import os
+import shutil
+import tempfile
+
+import numpy as np
+import six
+import tensorflow as tf
+from tensorflow.core.example import example_pb2
+from tensorflow.core.example import feature_pb2
+from tensorflow.python.feature_column import feature_column
+from tensorflow.python.feature_column import feature_column_v2
+from tensorflow.python.framework import ops
+from tensorflow.python.ops import variables as variables_lib
+from tensorflow_estimator.python.estimator import estimator
+from tensorflow_estimator.python.estimator import run_config
+from tensorflow_estimator.python.estimator.canned import linear
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.export import export
+from tensorflow_estimator.python.estimator.inputs import numpy_io
+from tensorflow_estimator.python.estimator.inputs import pandas_io
+
+try:
+  # pylint: disable=g-import-not-at-top
+  import pandas as pd
+  HAS_PANDAS = True
+except IOError:
+  # Pandas writes a temporary file during import. If it fails, don't use pandas.
+  HAS_PANDAS = False
+except ImportError:
+  HAS_PANDAS = False
+
+# pylint rules which are disabled by default for test files.
+# pylint: disable=invalid-name,protected-access,missing-docstring
+
+# Names of variables created by model.
+AGE_WEIGHT_NAME = 'linear/linear_model/age/weights'
+HEIGHT_WEIGHT_NAME = 'linear/linear_model/height/weights'
+OCCUPATION_WEIGHT_NAME = 'linear/linear_model/occupation/weights'
+BIAS_NAME = 'linear/linear_model/bias_weights'
+LANGUAGE_WEIGHT_NAME = 'linear/linear_model/language/weights'
+
+# This is so that we can easily switch between feature_column and
+# feature_column_v2 for testing.
+feature_column.numeric_column = feature_column._numeric_column
+feature_column.categorical_column_with_hash_bucket = feature_column._categorical_column_with_hash_bucket  # pylint: disable=line-too-long
+feature_column.categorical_column_with_vocabulary_list = feature_column._categorical_column_with_vocabulary_list  # pylint: disable=line-too-long
+feature_column.categorical_column_with_vocabulary_file = feature_column._categorical_column_with_vocabulary_file  # pylint: disable=line-too-long
+feature_column.embedding_column = feature_column._embedding_column
+
+
+def assert_close(expected, actual, rtol=1e-04, name='assert_close'):
+  with ops.name_scope(name, 'assert_close', (expected, actual, rtol)) as scope:
+    expected = ops.convert_to_tensor(expected, name='expected')
+    actual = ops.convert_to_tensor(actual, name='actual')
+    rdiff = tf.math.abs(expected - actual, 'diff') / tf.math.abs(expected)
+    rtol = ops.convert_to_tensor(rtol, name='rtol')
+    return tf.compat.v1.debugging.assert_less(
+        rdiff,
+        rtol,
+        data=('Condition expected =~ actual did not hold element-wise: '
+              'expected = ', expected, 'actual = ', actual, 'rdiff = ', rdiff,
+              'rtol = ', rtol,),
+        name=scope)
+
+
+def save_variables_to_ckpt(model_dir):
+  init_all_op = [tf.compat.v1.initializers.global_variables()]
+  with tf.compat.v1.Session() as sess:
+    sess.run(init_all_op)
+    tf.compat.v1.train.Saver().save(sess, os.path.join(model_dir, 'model.ckpt'))
+
+
+def queue_parsed_features(feature_map):
+  tensors_to_enqueue = []
+  keys = []
+  for key, tensor in six.iteritems(feature_map):
+    keys.append(key)
+    tensors_to_enqueue.append(tensor)
+  queue_dtypes = [x.dtype for x in tensors_to_enqueue]
+  input_queue = tf.queue.FIFOQueue(capacity=100, dtypes=queue_dtypes)
+  tf.compat.v1.train.queue_runner.add_queue_runner(
+      tf.compat.v1.train.queue_runner.QueueRunner(
+          input_queue, [input_queue.enqueue(tensors_to_enqueue)]))
+  dequeued_tensors = input_queue.dequeue()
+  return dict(zip(keys, dequeued_tensors))
+
+
+def sorted_key_dict(unsorted_dict):
+  return {k: unsorted_dict[k] for k in sorted(unsorted_dict)}
+
+
+def sigmoid(x):
+  return 1 / (1 + np.exp(-1.0 * x))
+
+
+class CheckPartitionerVarHook(tf.compat.v1.train.SessionRunHook):
+  """A `SessionRunHook` to check a partitioned variable."""
+
+  def __init__(self, test_case, var_name, var_dim, partitions):
+    self._test_case = test_case
+    self._var_name = var_name
+    self._var_dim = var_dim
+    self._partitions = partitions
+
+  def begin(self):
+    with tf.compat.v1.variable_scope(
+        tf.compat.v1.get_variable_scope()) as scope:
+      scope.reuse_variables()
+      partitioned_weight = tf.compat.v1.get_variable(
+          self._var_name, shape=(self._var_dim, 1))
+      self._test_case.assertIsInstance(partitioned_weight,
+                                       variables_lib.PartitionedVariable)
+      for part in partitioned_weight:
+        self._test_case.assertEqual(self._var_dim // self._partitions,
+                                    part.get_shape()[0])
+
+
+class BaseLinearRegressorPartitionerTest(object):
+
+  def __init__(self, linear_regressor_fn, fc_lib=feature_column):
+    self._linear_regressor_fn = linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def testPartitioner(self):
+    x_dim = 64
+    partitions = 4
+
+    def _partitioner(shape, dtype):
+      del dtype  # unused; required by Fn signature.
+      # Only partition the embedding tensor.
+      return [partitions, 1] if shape[0] == x_dim else [1]
+
+    regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.categorical_column_with_hash_bucket(
+            'language', hash_bucket_size=x_dim),),
+        partitioner=_partitioner,
+        model_dir=self._model_dir)
+
+    def _input_fn():
+      return {
+          'language':
+              tf.sparse.SparseTensor(
+                  values=['english', 'spanish'],
+                  indices=[[0, 0], [0, 1]],
+                  dense_shape=[1, 2])
+      }, [[10.]]
+
+    hook = CheckPartitionerVarHook(self, LANGUAGE_WEIGHT_NAME, x_dim,
+                                   partitions)
+    regressor.train(input_fn=_input_fn, steps=1, hooks=[hook])
+
+  def testDefaultPartitionerWithMultiplePsReplicas(self):
+    partitions = 2
+    # This results in weights larger than the default partition size of 64M,
+    # so partitioned weights are created (each weight uses 4 bytes).
+    x_dim = 32 << 20
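+    # A quick size check for the comment above (a sketch): 32 << 20 float32
+    # weights occupy (32 << 20) * 4 bytes = 128MiB, i.e. two 64MiB partitions.
+    assert (32 << 20) * 4 == 2 * (64 << 20)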
+
+    class FakeRunConfig(run_config.RunConfig):
+
+      @property
+      def num_ps_replicas(self):
+        return partitions
+
+    # Mock the device setter as ps is not available on test machines.
+    with tf.compat.v1.test.mock.patch.object(
+        estimator,
+        '_get_replica_device_setter',
+        return_value=lambda _: '/cpu:0'):
+      linear_regressor = self._linear_regressor_fn(
+          feature_columns=(self._fc_lib.categorical_column_with_hash_bucket(
+              'language', hash_bucket_size=x_dim),),
+          config=FakeRunConfig(),
+          model_dir=self._model_dir)
+
+      def _input_fn():
+        return {
+            'language':
+                tf.sparse.SparseTensor(
+                    values=['english', 'spanish'],
+                    indices=[[0, 0], [0, 1]],
+                    dense_shape=[1, 2])
+        }, [[10.]]
+
+      hook = CheckPartitionerVarHook(self, LANGUAGE_WEIGHT_NAME, x_dim,
+                                     partitions)
+      linear_regressor.train(input_fn=_input_fn, steps=1, hooks=[hook])
+
+
+# TODO(b/36813849): Add tests with dynamic shape inputs using placeholders.
+class BaseLinearRegressorEvaluationTest(object):
+
+  def __init__(self, linear_regressor_fn, fc_lib=feature_column):
+    self._linear_regressor_fn = linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_evaluation_for_simple_data(self):
+    with tf.Graph().as_default():
+      tf.Variable([[11.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([2.0], name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir)
+    eval_metrics = linear_regressor.evaluate(
+        input_fn=lambda: ({
+            'age': ((1,),)
+        }, ((10.,),)), steps=1)
+
+    # Logit is (1. * 11.0 + 2.0) = 13, while label is 10. Loss is 3**2 = 9.
+    self.assertDictEqual(
+        {
+            metric_keys.MetricKeys.LOSS: 9.,
+            metric_keys.MetricKeys.LOSS_MEAN: 9.,
+            metric_keys.MetricKeys.PREDICTION_MEAN: 13.,
+            metric_keys.MetricKeys.LABEL_MEAN: 10.,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: 100
+        }, eval_metrics)
+
+  def test_evaluation_batch(self):
+    """Tests evaluation for batch_size==2."""
+    with tf.Graph().as_default():
+      tf.Variable([[11.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([2.0], name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir)
+    eval_metrics = linear_regressor.evaluate(
+        input_fn=lambda: ({
+            'age': ((1,), (1,))
+        }, ((10.,), (10.,))), steps=1)
+
+    # Logit is (1. * 11.0 + 2.0) = 13, while label is 10.
+    # Loss per example is 3**2 = 9.
+    # Training loss is the sum over batch = 9 + 9 = 18
+    # Average loss is the average over batch = 9
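+    # A quick cross-check of the sum-vs-mean arithmetic above (a sketch):
+    _per_example_loss = (13. - 10.)**2  # 9.
+    assert _per_example_loss * 2 == 18. and _per_example_loss == 9.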
+    self.assertDictEqual(
+        {
+            metric_keys.MetricKeys.LOSS: 18.,
+            metric_keys.MetricKeys.LOSS_MEAN: 9.,
+            metric_keys.MetricKeys.PREDICTION_MEAN: 13.,
+            metric_keys.MetricKeys.LABEL_MEAN: 10.,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: 100
+        }, eval_metrics)
+
+  def test_evaluation_weights(self):
+    """Tests evaluation with weights."""
+    with tf.Graph().as_default():
+      tf.Variable([[11.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([2.0], name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    def _input_fn():
+      features = {'age': ((1,), (1,)), 'weights': ((1.,), (2.,))}
+      labels = ((10.,), (10.,))
+      return features, labels
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        weight_column='weights',
+        model_dir=self._model_dir)
+    eval_metrics = linear_regressor.evaluate(input_fn=_input_fn, steps=1)
+
+    # Logit is (1. * 11.0 + 2.0) = 13, while label is 10.
+    # Loss per example is 3**2 = 9.
+    # Training loss is the weighted sum over batch = 9 + 2*9 = 27
+    # Average loss is the weighted average = (9 + 2*9) / (1 + 2) = 9
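+    # A quick cross-check of the weighted arithmetic above (a sketch):
+    _weighted_sum = 1. * 9. + 2. * 9.  # 27.
+    assert _weighted_sum / (1. + 2.) == 9.  # weighted average loss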
+    self.assertDictEqual(
+        {
+            metric_keys.MetricKeys.LOSS: 27.,
+            metric_keys.MetricKeys.LOSS_MEAN: 9.,
+            metric_keys.MetricKeys.PREDICTION_MEAN: 13.,
+            metric_keys.MetricKeys.LABEL_MEAN: 10.,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP: 100
+        }, eval_metrics)
+
+  def test_evaluation_for_multi_dimensions(self):
+    x_dim = 3
+    label_dim = 2
+    with tf.Graph().as_default():
+      tf.Variable([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([7.0, 8.0], name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age', shape=(x_dim,)),),
+        label_dimension=label_dim,
+        model_dir=self._model_dir)
+    input_fn = numpy_io.numpy_input_fn(
+        x={
+            'age': np.array([[2., 4., 5.]]),
+        },
+        y=np.array([[46., 58.]]),
+        batch_size=1,
+        num_epochs=None,
+        shuffle=False)
+    eval_metrics = linear_regressor.evaluate(input_fn=input_fn, steps=1)
+
+    self.assertItemsEqual(
+        (metric_keys.MetricKeys.LOSS, metric_keys.MetricKeys.LOSS_MEAN,
+         metric_keys.MetricKeys.PREDICTION_MEAN,
+         metric_keys.MetricKeys.LABEL_MEAN, tf.compat.v1.GraphKeys.GLOBAL_STEP),
+        eval_metrics.keys())
+
+    # Logit is
+    #   [2., 4., 5.] * [1.0, 2.0] + [7.0, 8.0] = [39, 50] + [7.0, 8.0]
+    #                  [3.0, 4.0]
+    #                  [5.0, 6.0]
+    # which is [46, 58]
+    self.assertAlmostEqual(0, eval_metrics[metric_keys.MetricKeys.LOSS])
+
+  def test_evaluation_for_multiple_feature_columns(self):
+    with tf.Graph().as_default():
+      tf.Variable([[10.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([[2.0]], name=HEIGHT_WEIGHT_NAME)
+      tf.Variable([5.0], name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    batch_size = 2
+    feature_columns = [
+        self._fc_lib.numeric_column('age'),
+        self._fc_lib.numeric_column('height')
+    ]
+    input_fn = numpy_io.numpy_input_fn(
+        x={
+            'age': np.array([20, 40]),
+            'height': np.array([4, 8])
+        },
+        y=np.array([[213.], [421.]]),
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=False)
+
+    est = self._linear_regressor_fn(
+        feature_columns=feature_columns, model_dir=self._model_dir)
+
+    eval_metrics = est.evaluate(input_fn=input_fn, steps=1)
+    self.assertItemsEqual(
+        (metric_keys.MetricKeys.LOSS, metric_keys.MetricKeys.LOSS_MEAN,
+         metric_keys.MetricKeys.PREDICTION_MEAN,
+         metric_keys.MetricKeys.LABEL_MEAN, tf.compat.v1.GraphKeys.GLOBAL_STEP),
+        eval_metrics.keys())
+
+    # Logit is [(20. * 10.0 + 4 * 2.0 + 5.0), (40. * 10.0 + 8 * 2.0 + 5.0)] =
+    # [213.0, 421.0], while label is [213., 421.]. Loss = 0.
+    self.assertAlmostEqual(0, eval_metrics[metric_keys.MetricKeys.LOSS])
+
+  def test_evaluation_for_multiple_feature_columns_mix(self):
+    with tf.Graph().as_default():
+      tf.Variable([[10.0]], name=AGE_WEIGHT_NAME)
+      tf.Variable([[2.0]], name=HEIGHT_WEIGHT_NAME)
+      tf.Variable([5.0], name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    batch_size = 2
+    feature_columns = [
+        feature_column.numeric_column('age'),
+        tf.feature_column.numeric_column('height')
+    ]
+
+    def _input_fn():
+      features_ds = tf.compat.v1.data.Dataset.from_tensor_slices({
+          'age': np.array([20, 40]),
+          'height': np.array([4, 8])
+      })
+      labels_ds = tf.compat.v1.data.Dataset.from_tensor_slices(
+          np.array([[213.], [421.]]))
+      return (tf.compat.v1.data.Dataset.zip(
+          (features_ds, labels_ds)).batch(batch_size).repeat(None))
+
+    est = self._linear_regressor_fn(
+        feature_columns=feature_columns, model_dir=self._model_dir)
+
+    eval_metrics = est.evaluate(input_fn=_input_fn, steps=1)
+    self.assertItemsEqual(
+        (metric_keys.MetricKeys.LOSS, metric_keys.MetricKeys.LOSS_MEAN,
+         metric_keys.MetricKeys.PREDICTION_MEAN,
+         metric_keys.MetricKeys.LABEL_MEAN, tf.compat.v1.GraphKeys.GLOBAL_STEP),
+        eval_metrics.keys())
+
+    # Logit is [(20. * 10.0 + 4 * 2.0 + 5.0), (40. * 10.0 + 8 * 2.0 + 5.0)] =
+    # [213.0, 421.0], while label is [213., 421.]. Loss = 0.
+    self.assertAlmostEqual(0, eval_metrics[metric_keys.MetricKeys.LOSS])
+
+
+class BaseLinearRegressorPredictTest(object):
+
+  def __init__(self, linear_regressor_fn, fc_lib=feature_column):
+    self._linear_regressor_fn = linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def test_1d(self):
+    """Tests predict when all variables are one-dimensional."""
+    with tf.Graph().as_default():
+      tf.Variable([[10.]], name='linear/linear_model/x/weights')
+      tf.Variable([.2], name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('x'),),
+        model_dir=self._model_dir)
+
+    predict_input_fn = numpy_io.numpy_input_fn(
+        x={'x': np.array([[2.]])},
+        y=None,
+        batch_size=1,
+        num_epochs=1,
+        shuffle=False)
+    predictions = linear_regressor.predict(input_fn=predict_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    # x * weight + bias = 2. * 10. + .2 = 20.2
+    self.assertAllClose([[20.2]], predicted_scores)
+
+  def testMultiDim(self):
+    """Tests predict when all variables are multi-dimenstional."""
+    batch_size = 2
+    label_dimension = 3
+    x_dim = 4
+    feature_columns = (self._fc_lib.numeric_column('x', shape=(x_dim,)),)
+    with tf.Graph().as_default():
+      tf.Variable(  # shape=[x_dim, label_dimension]
+          [[1., 2., 3.], [2., 3., 4.], [3., 4., 5.], [4., 5., 6.]],
+          name='linear/linear_model/x/weights')
+      tf.Variable(  # shape=[label_dimension]
+          [.2, .4, .6], name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        label_dimension=label_dimension,
+        model_dir=self._model_dir)
+
+    predict_input_fn = numpy_io.numpy_input_fn(
+        # x shape=[batch_size, x_dim]
+        x={'x': np.array([[1., 2., 3., 4.], [5., 6., 7., 8.]])},
+        y=None,
+        batch_size=batch_size,
+        num_epochs=1,
+        shuffle=False)
+    predictions = linear_regressor.predict(input_fn=predict_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    # score = x * weight + bias, shape=[batch_size, label_dimension]
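+    # A quick cross-check of the expected scores (a sketch; _w, _b and _x
+    # mirror the variables created above):
+    _w = np.array([[1., 2., 3.], [2., 3., 4.], [3., 4., 5.], [4., 5., 6.]])
+    _b = np.array([.2, .4, .6])
+    _x = np.array([[1., 2., 3., 4.], [5., 6., 7., 8.]])
+    np.testing.assert_allclose(
+        np.dot(_x, _w) + _b, [[30.2, 40.4, 50.6], [70.2, 96.4, 122.6]])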
+    self.assertAllClose([[30.2, 40.4, 50.6], [70.2, 96.4, 122.6]],
+                        predicted_scores)
+
+  def testTwoFeatureColumns(self):
+    """Tests predict with two feature columns."""
+    with tf.Graph().as_default():
+      tf.Variable([[10.]], name='linear/linear_model/x0/weights')
+      tf.Variable([[20.]], name='linear/linear_model/x1/weights')
+      tf.Variable([.2], name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('x0'),
+                         self._fc_lib.numeric_column('x1')),
+        model_dir=self._model_dir)
+
+    predict_input_fn = numpy_io.numpy_input_fn(
+        x={
+            'x0': np.array([[2.]]),
+            'x1': np.array([[3.]])
+        },
+        y=None,
+        batch_size=1,
+        num_epochs=1,
+        shuffle=False)
+    predictions = linear_regressor.predict(input_fn=predict_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    # x0 * weight0 + x1 * weight1 + bias = 2. * 10. + 3. * 20 + .2 = 80.2
+    self.assertAllClose([[80.2]], predicted_scores)
+
+  def testTwoFeatureColumnsMix(self):
+    """Tests predict with two feature columns."""
+    with tf.Graph().as_default():
+      tf.Variable([[10.]], name='linear/linear_model/x0/weights')
+      tf.Variable([[20.]], name='linear/linear_model/x1/weights')
+      tf.Variable([.2], name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(feature_column.numeric_column('x0'),
+                         tf.feature_column.numeric_column('x1')),
+        model_dir=self._model_dir)
+
+    def _predict_input_fn():
+      return tf.compat.v1.data.Dataset.from_tensor_slices({
+          'x0': np.array([[2.]]),
+          'x1': np.array([[3.]])
+      }).batch(1)
+
+    predictions = linear_regressor.predict(input_fn=_predict_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    # x0 * weight0 + x1 * weight1 + bias = 2. * 10. + 3. * 20 + .2 = 80.2
+    self.assertAllClose([[80.2]], predicted_scores)
+
+  def testSparseCombiner(self):
+    w_a = 2.0
+    w_b = 3.0
+    w_c = 5.0
+    bias = 5.0
+    with tf.Graph().as_default():
+      tf.Variable([[w_a], [w_b], [w_c]], name=LANGUAGE_WEIGHT_NAME)
+      tf.Variable([bias], name=BIAS_NAME)
+      tf.Variable(
+          1, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    def _input_fn():
+      return tf.compat.v1.data.Dataset.from_tensors({
+          'language':
+              tf.sparse.SparseTensor(
+                  values=['a', 'c', 'b', 'c'],
+                  indices=[[0, 0], [0, 1], [1, 0], [1, 1]],
+                  dense_shape=[2, 2]),
+      })
+
+    feature_columns = (self._fc_lib.categorical_column_with_vocabulary_list(
+        'language', vocabulary_list=['a', 'b', 'c']),)
+
+    # Check prediction for each sparse_combiner.
+    # With sparse_combiner = 'sum', we have
+    # logits_1 = w_a + w_c + bias
+    #          = 2.0 + 5.0 + 5.0 = 12.0
+    # logits_2 = w_b + w_c + bias
+    #          = 3.0 + 5.0 + 5.0 = 13.0
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=feature_columns, model_dir=self._model_dir)
+    predictions = linear_regressor.predict(input_fn=_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    self.assertAllClose([[12.0], [13.0]], predicted_scores)
+
+    # With sparse_combiner = 'mean', we have
+    # logits_1 = 1/2 * (w_a + w_c) + bias
+    #          = 1/2 * (2.0 + 5.0) + 5.0 = 8.5
+    # logits_2 = 1/2 * (w_b + w_c) + bias
+    #          = 1/2 * (3.0 + 5.0) + 5.0 = 9.0
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        model_dir=self._model_dir,
+        sparse_combiner='mean')
+    predictions = linear_regressor.predict(input_fn=_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    self.assertAllClose([[8.5], [9.0]], predicted_scores)
+
+    # With sparse_combiner = 'sqrtn', we have
+    # logits_1 = sqrt(2)/2 * (w_a + w_c) + bias
+    #          = sqrt(2)/2 * (2.0 + 5.0) + 5.0 = 9.94974
+    # logits_2 = sqrt(2)/2 * (w_b + w_c) + bias
+    #          = sqrt(2)/2 * (3.0 + 5.0) + 5.0 = 10.65685
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        model_dir=self._model_dir,
+        sparse_combiner='sqrtn')
+    predictions = linear_regressor.predict(input_fn=_input_fn)
+    predicted_scores = [x['predictions'] for x in predictions]
+    self.assertAllClose([[9.94974], [10.65685]], predicted_scores)
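+    # A quick cross-check of all three combiners (a sketch; the underscore
+    # names are local helpers):
+    _w = {'a': w_a, 'b': w_b, 'c': w_c}
+    for _row, _expected in [(['a', 'c'], (12.0, 8.5, 9.94974)),
+                            (['b', 'c'], (13.0, 9.0, 10.65685))]:
+      _s = sum(_w[t] for t in _row)
+      assert abs(_s + bias - _expected[0]) < 1e-4  # 'sum'
+      assert abs(_s / len(_row) + bias - _expected[1]) < 1e-4  # 'mean'
+      assert abs(_s / len(_row)**0.5 + bias - _expected[2]) < 1e-4  # 'sqrtn'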
+
+
+class BaseLinearRegressorIntegrationTest(object):
+
+  def __init__(self, linear_regressor_fn, fc_lib=feature_column):
+    self._linear_regressor_fn = linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def _test_complete_flow(self, train_input_fn, eval_input_fn, predict_input_fn,
+                          input_dimension, label_dimension, prediction_length):
+    feature_columns = [
+        self._fc_lib.numeric_column('x', shape=(input_dimension,))
+    ]
+    est = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        label_dimension=label_dimension,
+        model_dir=self._model_dir)
+
+    # TRAIN
+    # learn y = x
+    est.train(train_input_fn, steps=200)
+
+    # EVALUATE
+    scores = est.evaluate(eval_input_fn)
+    self.assertEqual(200, scores[tf.compat.v1.GraphKeys.GLOBAL_STEP])
+    self.assertIn(metric_keys.MetricKeys.LOSS, six.iterkeys(scores))
+
+    # PREDICT
+    predictions = np.array(
+        [x['predictions'] for x in est.predict(predict_input_fn)])
+    self.assertAllEqual((prediction_length, label_dimension), predictions.shape)
+
+    # EXPORT
+    feature_spec = tf.compat.v1.feature_column.make_parse_example_spec(
+        feature_columns)
+    serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
+        feature_spec)
+    export_dir = est.export_saved_model(tempfile.mkdtemp(),
+                                        serving_input_receiver_fn)
+    self.assertTrue(tf.compat.v1.gfile.Exists(export_dir))
+
+  def test_numpy_input_fn(self):
+    """Tests complete flow with numpy_input_fn."""
+    label_dimension = 2
+    input_dimension = label_dimension
+    batch_size = 10
+    prediction_length = batch_size
+    data = np.linspace(0., 2., batch_size * label_dimension, dtype=np.float32)
+    data = data.reshape(batch_size, label_dimension)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=data,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    eval_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=data,
+        batch_size=batch_size,
+        num_epochs=1,
+        shuffle=False)
+    predict_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=None,
+        batch_size=batch_size,
+        num_epochs=1,
+        shuffle=False)
+
+    self._test_complete_flow(
+        train_input_fn=train_input_fn,
+        eval_input_fn=eval_input_fn,
+        predict_input_fn=predict_input_fn,
+        input_dimension=input_dimension,
+        label_dimension=label_dimension,
+        prediction_length=prediction_length)
+
+  def test_pandas_input_fn(self):
+    """Tests complete flow with pandas_input_fn."""
+    if not HAS_PANDAS:
+      return
+
+    # Pandas DataFrame naturally supports only 1-dim data.
+    label_dimension = 1
+    input_dimension = label_dimension
+    batch_size = 10
+    data = np.array([1., 2., 3., 4.], dtype=np.float32)
+    x = pd.DataFrame({'x': data})
+    y = pd.Series(data)
+    prediction_length = 4
+
+    train_input_fn = pandas_io.pandas_input_fn(
+        x=x, y=y, batch_size=batch_size, num_epochs=None, shuffle=True)
+    eval_input_fn = pandas_io.pandas_input_fn(
+        x=x, y=y, batch_size=batch_size, shuffle=False)
+    predict_input_fn = pandas_io.pandas_input_fn(
+        x=x, batch_size=batch_size, shuffle=False)
+
+    self._test_complete_flow(
+        train_input_fn=train_input_fn,
+        eval_input_fn=eval_input_fn,
+        predict_input_fn=predict_input_fn,
+        input_dimension=input_dimension,
+        label_dimension=label_dimension,
+        prediction_length=prediction_length)
+
+  def test_input_fn_from_parse_example(self):
+    """Tests complete flow with input_fn constructed from parse_example."""
+    label_dimension = 2
+    input_dimension = label_dimension
+    batch_size = 10
+    prediction_length = batch_size
+    data = np.linspace(0., 2., batch_size * label_dimension, dtype=np.float32)
+    data = data.reshape(batch_size, label_dimension)
+
+    serialized_examples = []
+    for datum in data:
+      example = example_pb2.Example(
+          features=feature_pb2.Features(
+              feature={
+                  'x':
+                      feature_pb2.Feature(
+                          float_list=feature_pb2.FloatList(value=datum)),
+                  'y':
+                      feature_pb2.Feature(
+                          float_list=feature_pb2.FloatList(
+                              value=datum[:label_dimension])),
+              }))
+      serialized_examples.append(example.SerializeToString())
+
+    feature_spec = {
+        'x': tf.io.FixedLenFeature([input_dimension], tf.dtypes.float32),
+        'y': tf.io.FixedLenFeature([label_dimension], tf.dtypes.float32),
+    }
+
+    def _train_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(serialized_examples,
+                                                  feature_spec)
+      features = queue_parsed_features(feature_map)
+      labels = features.pop('y')
+      return features, labels
+
+    def _eval_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(
+          tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
+          feature_spec)
+      features = queue_parsed_features(feature_map)
+      labels = features.pop('y')
+      return features, labels
+
+    def _predict_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(
+          tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
+          feature_spec)
+      features = queue_parsed_features(feature_map)
+      features.pop('y')
+      return features, None
+
+    self._test_complete_flow(
+        train_input_fn=_train_input_fn,
+        eval_input_fn=_eval_input_fn,
+        predict_input_fn=_predict_input_fn,
+        input_dimension=input_dimension,
+        label_dimension=label_dimension,
+        prediction_length=prediction_length)
+
+
+class BaseLinearRegressorTrainingTest(object):
+
+  def __init__(self, linear_regressor_fn, fc_lib=feature_column):
+    self._linear_regressor_fn = linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      tf.compat.v1.summary.FileWriterCache.clear()
+      shutil.rmtree(self._model_dir)
+
+  def _mock_optimizer(self, expected_loss=None):
+    expected_var_names = [
+        '%s/part_0:0' % AGE_WEIGHT_NAME,
+        '%s/part_0:0' % BIAS_NAME
+    ]
+
+    def _minimize(loss, global_step=None, var_list=None):
+      trainable_vars = var_list or tf.compat.v1.get_collection(
+          tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)
+      self.assertItemsEqual(expected_var_names,
+                            [var.name for var in trainable_vars])
+
+      # Verify loss. We can't check the value directly, so we add an assert op.
+      self.assertEqual(0, loss.shape.ndims)
+      if expected_loss is None:
+        if global_step is not None:
+          return tf.compat.v1.assign_add(global_step, 1).op
+        return tf.no_op()
+      assert_loss = assert_close(
+          tf.cast(expected_loss, name='expected', dtype=tf.dtypes.float32),
+          loss,
+          name='assert_loss')
+      with tf.control_dependencies((assert_loss,)):
+        if global_step is not None:
+          return tf.compat.v1.assign_add(global_step, 1).op
+        return tf.no_op()
+
+    mock_optimizer = tf.compat.v1.test.mock.NonCallableMock(
+        spec=tf.compat.v1.train.Optimizer,
+        wraps=tf.compat.v1.train.Optimizer(
+            use_locking=False, name='my_optimizer'))
+    mock_optimizer.minimize = tf.compat.v1.test.mock.MagicMock(wraps=_minimize)
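+    # MagicMock(wraps=...) still executes _minimize (and its assertions) on
+    # every training step while recording call_count for the tests to inspect.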
+
+    # NOTE: Estimator.params performs a deepcopy, which wreaks havoc with mocks.
+    # So, return mock_optimizer itself for deepcopy.
+    mock_optimizer.__deepcopy__ = lambda _: mock_optimizer
+    return mock_optimizer
+
+  def _assert_checkpoint(self,
+                         expected_global_step,
+                         expected_age_weight=None,
+                         expected_bias=None):
+    shapes = {
+        name: shape
+        for (name, shape) in tf.train.list_variables(self._model_dir)
+    }
+
+    self.assertEqual([], shapes[tf.compat.v1.GraphKeys.GLOBAL_STEP])
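+    # (The global step is a scalar, hence the empty shape list above.)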
+    self.assertEqual(
+        expected_global_step,
+        tf.train.load_variable(self._model_dir,
+                               tf.compat.v1.GraphKeys.GLOBAL_STEP))
+
+    self.assertEqual([1, 1], shapes[AGE_WEIGHT_NAME])
+    if expected_age_weight is not None:
+      self.assertEqual(expected_age_weight,
+                       tf.train.load_variable(self._model_dir, AGE_WEIGHT_NAME))
+
+    self.assertEqual([1], shapes[BIAS_NAME])
+    if expected_bias is not None:
+      self.assertEqual(expected_bias,
+                       tf.train.load_variable(self._model_dir, BIAS_NAME))
+
+  def testFromScratchWithDefaultOptimizer(self):
+    # Create LinearRegressor.
+    label = 5.
+    age = 17
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir)
+
+    # Train for a few steps, and validate final checkpoint.
+    num_steps = 10
+    linear_regressor.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self._assert_checkpoint(num_steps)
+
+  def testTrainWithOneDimLabel(self):
+    label_dimension = 1
+    batch_size = 20
+    feature_columns = [self._fc_lib.numeric_column('age', shape=(1,))]
+    est = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        label_dimension=label_dimension,
+        model_dir=self._model_dir)
+    data_rank_1 = np.linspace(0., 2., batch_size, dtype=np.float32)
+    self.assertEqual((batch_size,), data_rank_1.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={'age': data_rank_1},
+        y=data_rank_1,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(200)
+
+  def testTrainWithOneDimWeight(self):
+    label_dimension = 1
+    batch_size = 20
+    feature_columns = [self._fc_lib.numeric_column('age', shape=(1,))]
+    est = self._linear_regressor_fn(
+        feature_columns=feature_columns,
+        label_dimension=label_dimension,
+        weight_column='w',
+        model_dir=self._model_dir)
+
+    data_rank_1 = np.linspace(0., 2., batch_size, dtype=np.float32)
+    self.assertEqual((batch_size,), data_rank_1.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={
+            'age': data_rank_1,
+            'w': data_rank_1
+        },
+        y=data_rank_1,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(200)
+
+  def testFromScratch(self):
+    # Create LinearRegressor.
+    label = 5.
+    age = 17
+    # loss = (logits - label)^2 = (0 - 5.)^2 = 25.
+    mock_optimizer = self._mock_optimizer(expected_loss=25.)
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir,
+        optimizer=mock_optimizer)
+    self.assertEqual(0, mock_optimizer.minimize.call_count)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    linear_regressor.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self.assertEqual(1, mock_optimizer.minimize.call_count)
+    self._assert_checkpoint(
+        expected_global_step=num_steps,
+        expected_age_weight=0.,
+        expected_bias=0.)
+
+  def testFromCheckpoint(self):
+    # Create initial checkpoint.
+    age_weight = 10.0
+    bias = 5.0
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable([[age_weight]], name=AGE_WEIGHT_NAME)
+      tf.Variable([bias], name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    # logits = age * age_weight + bias = 17 * 10. + 5. = 175
+    # loss = (logits - label)^2 = (175 - 5)^2 = 28900
+    mock_optimizer = self._mock_optimizer(expected_loss=28900.)
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir,
+        optimizer=mock_optimizer)
+    self.assertEqual(0, mock_optimizer.minimize.call_count)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    linear_regressor.train(
+        input_fn=lambda: ({
+            'age': ((17,),)
+        }, ((5.,),)), steps=num_steps)
+    self.assertEqual(1, mock_optimizer.minimize.call_count)
+    self._assert_checkpoint(
+        expected_global_step=initial_global_step + num_steps,
+        expected_age_weight=age_weight,
+        expected_bias=bias)
+
+  def testFromCheckpointMultiBatch(self):
+    # Create initial checkpoint.
+    age_weight = 10.0
+    bias = 5.0
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable([[age_weight]], name=AGE_WEIGHT_NAME)
+      tf.Variable([bias], name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    # logits = age * age_weight + bias
+    # logits[0] = 17 * 10. + 5. = 175
+    # logits[1] = 15 * 10. + 5. = 155
+    # loss = sum((logits - label)^2) = (175 - 5)^2 + (155 - 3)^2
+    #      = 28900 + 23104 = 52004
+    mock_optimizer = self._mock_optimizer(expected_loss=52004.)
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        model_dir=self._model_dir,
+        optimizer=mock_optimizer)
+    self.assertEqual(0, mock_optimizer.minimize.call_count)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    linear_regressor.train(
+        input_fn=lambda: ({
+            'age': ((17,), (15,))
+        }, ((5.,), (3.,))),
+        steps=num_steps)
+    self.assertEqual(1, mock_optimizer.minimize.call_count)
+    self._assert_checkpoint(
+        expected_global_step=initial_global_step + num_steps,
+        expected_age_weight=age_weight,
+        expected_bias=bias)
+
+
+class BaseLinearClassifierTrainingTest(object):
+
+  def __init__(self, linear_classifier_fn, fc_lib=feature_column):
+    self._linear_classifier_fn = linear_classifier_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      shutil.rmtree(self._model_dir)
+
+  def _mock_optimizer(self, expected_loss=None):
+    expected_var_names = [
+        '%s/part_0:0' % AGE_WEIGHT_NAME,
+        '%s/part_0:0' % BIAS_NAME
+    ]
+
+    def _minimize(loss, global_step):
+      trainable_vars = tf.compat.v1.get_collection(
+          tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)
+      self.assertItemsEqual(expected_var_names,
+                            [var.name for var in trainable_vars])
+
+      # Verify loss. We can't check the value directly, so we add an assert op.
+      self.assertEqual(0, loss.shape.ndims)
+      if expected_loss is None:
+        return tf.compat.v1.assign_add(global_step, 1).op
+      assert_loss = assert_close(
+          tf.cast(expected_loss, name='expected', dtype=tf.dtypes.float32),
+          loss,
+          name='assert_loss')
+      with tf.control_dependencies((assert_loss,)):
+        return tf.compat.v1.assign_add(global_step, 1).op
+
+    mock_optimizer = tf.compat.v1.test.mock.NonCallableMock(
+        spec=tf.compat.v1.train.Optimizer,
+        wraps=tf.compat.v1.train.Optimizer(
+            use_locking=False, name='my_optimizer'))
+    mock_optimizer.minimize = tf.compat.v1.test.mock.MagicMock(wraps=_minimize)
+
+    # NOTE: Estimator.params performs a deepcopy, which wreaks havoc with mocks.
+    # So, return mock_optimizer itself for deepcopy.
+    mock_optimizer.__deepcopy__ = lambda _: mock_optimizer
+    return mock_optimizer
+
+  def _assert_checkpoint(self,
+                         n_classes,
+                         expected_global_step,
+                         expected_age_weight=None,
+                         expected_bias=None):
+    logits_dimension = n_classes if n_classes > 2 else 1
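+    # The binary head uses a single sigmoid logit; the multi class head emits
+    # one softmax logit per class.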
+
+    shapes = {
+        name: shape
+        for (name, shape) in tf.train.list_variables(self._model_dir)
+    }
+
+    self.assertEqual([], shapes[tf.compat.v1.GraphKeys.GLOBAL_STEP])
+    self.assertEqual(
+        expected_global_step,
+        tf.train.load_variable(self._model_dir,
+                               tf.compat.v1.GraphKeys.GLOBAL_STEP))
+
+    self.assertEqual([1, logits_dimension], shapes[AGE_WEIGHT_NAME])
+    if expected_age_weight is not None:
+      self.assertAllEqual(
+          expected_age_weight,
+          tf.train.load_variable(self._model_dir, AGE_WEIGHT_NAME))
+
+    self.assertEqual([logits_dimension], shapes[BIAS_NAME])
+    if expected_bias is not None:
+      self.assertAllEqual(expected_bias,
+                          tf.train.load_variable(self._model_dir, BIAS_NAME))
+
+  def _testFromScratchWithDefaultOptimizer(self, n_classes):
+    label = 0
+    age = 17
+    est = linear.LinearClassifier(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+
+    # Train for a few steps, and validate final checkpoint.
+    num_steps = 10
+    est.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self._assert_checkpoint(n_classes, num_steps)
+
+  def testBinaryClassesFromScratchWithDefaultOptimizer(self):
+    self._testFromScratchWithDefaultOptimizer(n_classes=2)
+
+  def testMultiClassesFromScratchWithDefaultOptimizer(self):
+    self._testFromScratchWithDefaultOptimizer(n_classes=4)
+
+  def _testTrainWithTwoDimsLabel(self, n_classes):
+    batch_size = 20
+
+    est = linear.LinearClassifier(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    data_rank_1 = np.array([0, 1])
+    data_rank_2 = np.array([[0], [1]])
+    self.assertEqual((2,), data_rank_1.shape)
+    self.assertEqual((2, 1), data_rank_2.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={'age': data_rank_1},
+        y=data_rank_2,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(n_classes, 200)
+
+  def testBinaryClassesTrainWithTwoDimsLabel(self):
+    self._testTrainWithTwoDimsLabel(n_classes=2)
+
+  def testMultiClassesTrainWithTwoDimsLabel(self):
+    self._testTrainWithTwoDimsLabel(n_classes=4)
+
+  def _testTrainWithOneDimLabel(self, n_classes):
+    batch_size = 20
+
+    est = linear.LinearClassifier(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    data_rank_1 = np.array([0, 1])
+    self.assertEqual((2,), data_rank_1.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={'age': data_rank_1},
+        y=data_rank_1,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(n_classes, 200)
+
+  def testBinaryClassesTrainWithOneDimLabel(self):
+    self._testTrainWithOneDimLabel(n_classes=2)
+
+  def testMultiClassesTrainWithOneDimLabel(self):
+    self._testTrainWithOneDimLabel(n_classes=4)
+
+  def _testTrainWithTwoDimsWeight(self, n_classes):
+    batch_size = 20
+
+    est = linear.LinearClassifier(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        weight_column='w',
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    data_rank_1 = np.array([0, 1])
+    data_rank_2 = np.array([[0], [1]])
+    self.assertEqual((2,), data_rank_1.shape)
+    self.assertEqual((2, 1), data_rank_2.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={
+            'age': data_rank_1,
+            'w': data_rank_2
+        },
+        y=data_rank_1,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(n_classes, 200)
+
+  def testBinaryClassesTrainWithTwoDimsWeight(self):
+    self._testTrainWithTwoDimsWeight(n_classes=2)
+
+  def testMultiClassesTrainWithTwoDimsWeight(self):
+    self._testTrainWithTwoDimsWeight(n_classes=4)
+
+  def _testTrainWithOneDimWeight(self, n_classes):
+    batch_size = 20
+
+    est = linear.LinearClassifier(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        weight_column='w',
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    data_rank_1 = np.array([0, 1])
+    self.assertEqual((2,), data_rank_1.shape)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={
+            'age': data_rank_1,
+            'w': data_rank_1
+        },
+        y=data_rank_1,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    est.train(train_input_fn, steps=200)
+    self._assert_checkpoint(n_classes, 200)
+
+  def testBinaryClassesTrainWithOneDimWeight(self):
+    self._testTrainWithOneDimWeight(n_classes=2)
+
+  def testMultiClassesTrainWithOneDimWeight(self):
+    self._testTrainWithOneDimWeight(n_classes=4)
+
+  def _testFromScratch(self, n_classes):
+    label = 1
+    age = 17
+    # For binary classifier:
+    #   loss = sigmoid_cross_entropy(logits, label) where logits=0 (weights are
+    #   all zero initially) and label = 1 so,
+    #      loss = 1 * -log ( sigmoid(logits) ) = 0.69315
+    # For multi class classifier:
+    #   loss = cross_entropy(logits, label) where logits are all 0s (weights are
+    #   all zero initially) and label = 1 so,
+    #      loss = 1 * -log ( 1.0 / n_classes )
+    # For this particular test case, as the logits are all equal, the formula
+    # 1 * -log ( 1.0 / n_classes ) covers both the binary and the multi class
+    # cases.
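+    # For example, n_classes=2 gives -log(1/2) = 0.69315 and n_classes=4
+    # gives -log(1/4) = 1.38629.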
+    mock_optimizer = self._mock_optimizer(
+        expected_loss=(-1 * math.log(1.0 / n_classes)))
+
+    est = linear.LinearClassifier(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        optimizer=mock_optimizer,
+        model_dir=self._model_dir)
+    self.assertEqual(0, mock_optimizer.minimize.call_count)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    est.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self.assertEqual(1, mock_optimizer.minimize.call_count)
+    self._assert_checkpoint(
+        n_classes,
+        expected_global_step=num_steps,
+        expected_age_weight=[[0.]] if n_classes == 2 else [[0.] * n_classes],
+        expected_bias=[0.] if n_classes == 2 else [0.] * n_classes)
+
+  def testBinaryClassesFromScratch(self):
+    self._testFromScratch(n_classes=2)
+
+  def testMultiClassesFromScratch(self):
+    self._testFromScratch(n_classes=4)
+
+  def _testFromCheckpoint(self, n_classes):
+    # Create initial checkpoint.
+    label = 1
+    age = 17
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as 2.0 * range(n_classes).
+    age_weight = [[2.0]] if n_classes == 2 else (np.reshape(
+        2.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [-35.0] if n_classes == 2 else [-35.0] * n_classes
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    # For binary classifier:
+    #   logits = age * age_weight + bias = 17 * 2. - 35. = -1.
+    #   loss = sigmoid_cross_entropy(logits, label)
+    #   so, loss = 1 * -log ( sigmoid(-1) ) = 1.3133
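+    #   (sigmoid(-1) ~= 0.26894, and -log(0.26894) ~= 1.3133.)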
+    # For multi class classifier:
+    #   loss = cross_entropy(logits, label)
+    #   where logits = 17 * age_weight + bias and label = 1
+    #   so, loss = 1 * -log ( soft_max(logits)[1] )
+    if n_classes == 2:
+      expected_loss = 1.3133
+    else:
+      logits = age_weight * age + bias
+      logits_exp = np.exp(logits)
+      softmax = logits_exp / logits_exp.sum()
+      expected_loss = -1 * math.log(softmax[0, label])
+
+    mock_optimizer = self._mock_optimizer(expected_loss=expected_loss)
+
+    est = linear.LinearClassifier(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        optimizer=mock_optimizer,
+        model_dir=self._model_dir)
+    self.assertEqual(0, mock_optimizer.minimize.call_count)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    est.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self.assertEqual(1, mock_optimizer.minimize.call_count)
+    self._assert_checkpoint(
+        n_classes,
+        expected_global_step=initial_global_step + num_steps,
+        expected_age_weight=age_weight,
+        expected_bias=bias)
+
+  def testBinaryClassesFromCheckpoint(self):
+    self._testFromCheckpoint(n_classes=2)
+
+  def testMultiClassesFromCheckpoint(self):
+    self._testFromCheckpoint(n_classes=4)
+
+  def _testFromCheckpointFloatLabels(self, n_classes):
+    """Tests float labels for binary classification."""
+    # Create initial checkpoint.
+    if n_classes > 2:
+      return
+    label = 0.8
+    age = 17
+    age_weight = [[2.0]]
+    bias = [-35.0]
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    # logits = age * age_weight + bias = 17 * 2. - 35. = -1.
+    # loss = sigmoid_cross_entropy(logits, label)
+    # => loss = -0.8 * log(sigmoid(-1)) -0.2 * log(sigmoid(+1)) = 1.1132617
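+    # (0.8 * 1.31326 + 0.2 * 0.31326 = 1.05061 + 0.06265 ~= 1.1133.)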
+    mock_optimizer = self._mock_optimizer(expected_loss=1.1132617)
+
+    est = linear.LinearClassifier(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        optimizer=mock_optimizer,
+        model_dir=self._model_dir)
+    self.assertEqual(0, mock_optimizer.minimize.call_count)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    est.train(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=num_steps)
+    self.assertEqual(1, mock_optimizer.minimize.call_count)
+
+  def testBinaryClassesFromCheckpointFloatLabels(self):
+    self._testFromCheckpointFloatLabels(n_classes=2)
+
+  def testMultiClassesFromCheckpointFloatLabels(self):
+    self._testFromCheckpointFloatLabels(n_classes=4)
+
+  def _testFromCheckpointMultiBatch(self, n_classes):
+    # Create initial checkpoint.
+    label = [1, 0]
+    age = [17.0, 18.5]
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as 2.0 * range(n_classes).
+    age_weight = [[2.0]] if n_classes == 2 else (np.reshape(
+        2.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [-35.0] if n_classes == 2 else [-35.0] * n_classes
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    # For binary classifier:
+    #   logits = age * age_weight + bias
+    #   logits[0] = 17 * 2. - 35. = -1.
+    #   logits[1] = 18.5 * 2. - 35. = 2.
+    #   loss = sigmoid_cross_entropy(logits, label)
+    #   so, loss[0] = 1 * -log ( sigmoid(-1) ) = 1.3133
+    #       loss[1] = (1 - 0) * -log ( 1 - sigmoid(2) ) = 2.1269
+    #   expected_loss = loss[0] + loss[1]
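+    #   (Numerically: -log(sigmoid(-1)) ~= 1.3133 and
+    #   -log(1 - sigmoid(2)) = -log(0.1192) ~= 2.1269.)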
+    # For multi class classifier:
+    #   loss = cross_entropy(logits, label)
+    #   where logits = [17, 18.5] * age_weight + bias and label = [1, 0]
+    #   so, loss = 1 * -log ( soft_max(logits)[label] )
+    #   expected_loss = loss[0] + loss[1]
+    if n_classes == 2:
+      expected_loss = 1.3133 + 2.1269
+    else:
+      logits = age_weight * np.reshape(age, (2, 1)) + bias
+      logits_exp = np.exp(logits)
+      softmax_row_0 = logits_exp[0] / logits_exp[0].sum()
+      softmax_row_1 = logits_exp[1] / logits_exp[1].sum()
+      expected_loss_0 = -1 * math.log(softmax_row_0[label[0]])
+      expected_loss_1 = -1 * math.log(softmax_row_1[label[1]])
+      expected_loss = expected_loss_0 + expected_loss_1
+
+    mock_optimizer = self._mock_optimizer(expected_loss=expected_loss)
+
+    est = linear.LinearClassifier(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        optimizer=mock_optimizer,
+        model_dir=self._model_dir)
+    self.assertEqual(0, mock_optimizer.minimize.call_count)
+
+    # Train for a few steps, and validate optimizer and final checkpoint.
+    num_steps = 10
+    est.train(input_fn=lambda: ({'age': age}, label), steps=num_steps)
+    self.assertEqual(1, mock_optimizer.minimize.call_count)
+    self._assert_checkpoint(
+        n_classes,
+        expected_global_step=initial_global_step + num_steps,
+        expected_age_weight=age_weight,
+        expected_bias=bias)
+
+  def testBinaryClassesFromCheckpointMultiBatch(self):
+    self._testFromCheckpointMultiBatch(n_classes=2)
+
+  def testMultiClassesFromCheckpointMultiBatch(self):
+    self._testFromCheckpointMultiBatch(n_classes=4)
+
+
+class BaseLinearClassifierEvaluationTest(object):
+
+  def __init__(self, linear_classifier_fn, fc_lib=feature_column):
+    self._linear_classifier_fn = linear_classifier_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      shutil.rmtree(self._model_dir)
+
+  def _test_evaluation_for_simple_data(self, n_classes):
+    label = 1
+    age = 1.
+
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as -11.0 * range(n_classes).
+    age_weight = [[-11.0]] if n_classes == 2 else (np.reshape(
+        -11.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [-30.0] if n_classes == 2 else [-30.0] * n_classes
+
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          100, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    est = self._linear_classifier_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    eval_metrics = est.evaluate(
+        input_fn=lambda: ({
+            'age': ((age,),)
+        }, ((label,),)), steps=1)
+
+    if n_classes == 2:
+      # Binary classes: logits = 1. * -11. - 30. = -41, so
+      # loss = -log(sigmoid(-41)) = softplus(41) ~= 41.
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: 41.,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.LOSS_MEAN: 41.,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+          metric_keys.MetricKeys.PRECISION: 0.,
+          metric_keys.MetricKeys.RECALL: 0.,
+          metric_keys.MetricKeys.PREDICTION_MEAN: 0.,
+          metric_keys.MetricKeys.LABEL_MEAN: 1.,
+          metric_keys.MetricKeys.ACCURACY_BASELINE: 1.,
+          metric_keys.MetricKeys.AUC: 0.,
+          metric_keys.MetricKeys.AUC_PR: 1.,
+      }
+    else:
+      # Multi classes: loss = 1 * -log ( soft_max(logits)[label] )
+      logits = age_weight * age + bias
+      logits_exp = np.exp(logits)
+      softmax = logits_exp / logits_exp.sum()
+      expected_loss = -1 * math.log(softmax[0, label])
+
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: expected_loss,
+          metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+      }
+
+    self.assertAllClose(
+        sorted_key_dict(expected_metrics),
+        sorted_key_dict(eval_metrics),
+        rtol=1e-3)
+
+  def test_binary_classes_evaluation_for_simple_data(self):
+    self._test_evaluation_for_simple_data(n_classes=2)
+
+  def test_multi_classes_evaluation_for_simple_data(self):
+    self._test_evaluation_for_simple_data(n_classes=4)
+
+  def _test_evaluation_batch(self, n_classes):
+    """Tests evaluation for batch_size==2."""
+    label = [1, 0]
+    age = [17., 18.]
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as 2.0 * range(n_classes).
+    age_weight = [[2.0]] if n_classes == 2 else (np.reshape(
+        2.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [-35.0] if n_classes == 2 else [-35.0] * n_classes
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    est = self._linear_classifier_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+    eval_metrics = est.evaluate(
+        input_fn=lambda: ({'age': age}, label), steps=1)
+
+    if n_classes == 2:
+      # Logits are (-1., 1.), labels are (1, 0).
+      # Loss is
+      #   loss for row 1: 1 * -log(sigmoid(-1)) = 1.3133
+      #   loss for row 2: (1 - 0) * -log(1 - sigmoid(1)) = 1.3133
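+      # (The two rows agree because sigmoid(-x) = 1 - sigmoid(x).)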
+      expected_loss = 1.3133 * 2
+
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: expected_loss,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.LOSS_MEAN: expected_loss / 2,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+          metric_keys.MetricKeys.PRECISION: 0.,
+          metric_keys.MetricKeys.RECALL: 0.,
+          metric_keys.MetricKeys.PREDICTION_MEAN: 0.5,
+          metric_keys.MetricKeys.LABEL_MEAN: 0.5,
+          metric_keys.MetricKeys.ACCURACY_BASELINE: 0.5,
+          metric_keys.MetricKeys.AUC: 0.,
+          metric_keys.MetricKeys.AUC_PR: 0.25,
+      }
+    else:
+      # Multi classes: loss = 1 * -log ( soft_max(logits)[label] )
+      logits = age_weight * np.reshape(age, (2, 1)) + bias
+      logits_exp = np.exp(logits)
+      softmax_row_0 = logits_exp[0] / logits_exp[0].sum()
+      softmax_row_1 = logits_exp[1] / logits_exp[1].sum()
+      expected_loss_0 = -1 * math.log(softmax_row_0[label[0]])
+      expected_loss_1 = -1 * math.log(softmax_row_1[label[1]])
+      expected_loss = expected_loss_0 + expected_loss_1
+
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: expected_loss,
+          metric_keys.MetricKeys.LOSS_MEAN: expected_loss / 2,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+      }
+
+    self.assertAllClose(
+        sorted_key_dict(expected_metrics),
+        sorted_key_dict(eval_metrics),
+        rtol=1e-3)
+
+  def test_binary_classes_evaluation_batch(self):
+    self._test_evaluation_batch(n_classes=2)
+
+  def test_multi_classes_evaluation_batch(self):
+    self._test_evaluation_batch(n_classes=4)
+
+  def _test_evaluation_weights(self, n_classes):
+    """Tests evaluation with weights."""
+
+    label = [1, 0]
+    age = [17., 18.]
+    weights = [1., 2.]
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as 2.0 * range(n_classes).
+    age_weight = [[2.0]] if n_classes == 2 else (np.reshape(
+        2.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [-35.0] if n_classes == 2 else [-35.0] * n_classes
+    initial_global_step = 100
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(
+          initial_global_step,
+          name=tf.compat.v1.GraphKeys.GLOBAL_STEP,
+          dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    est = self._linear_classifier_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        n_classes=n_classes,
+        weight_column='w',
+        model_dir=self._model_dir)
+    eval_metrics = est.evaluate(
+        input_fn=lambda: ({'age': age, 'w': weights}, label), steps=1)
+
+    if n_classes == 2:
+      # Logits are (-1., 1.), labels are (1, 0).
+      # Loss is
+      #   loss for row 1: 1 * -log(sigmoid(-1)) = 1.3133
+      #   loss for row 2: (1 - 0) * -log(1 - sigmoid(1)) = 1.3133
+      #   weights = [1., 2.]
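+      # Weighted sum: 1. * 1.3133 + 2. * 1.3133 = 1.3133 * 3.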
+      expected_loss = 1.3133 * (1. + 2.)
+      loss_mean = expected_loss / (1.0 + 2.0)
+      label_mean = np.average(label, weights=weights)
+      logits = [-1, 1]
+      logistics = sigmoid(np.array(logits))
+      predictions_mean = np.average(logistics, weights=weights)
+
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: expected_loss,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.LOSS_MEAN: loss_mean,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+          metric_keys.MetricKeys.PRECISION: 0.,
+          metric_keys.MetricKeys.RECALL: 0.,
+          metric_keys.MetricKeys.PREDICTION_MEAN: predictions_mean,
+          metric_keys.MetricKeys.LABEL_MEAN: label_mean,
+          metric_keys.MetricKeys.ACCURACY_BASELINE:
+              (max(label_mean, 1 - label_mean)),
+          metric_keys.MetricKeys.AUC: 0.,
+          metric_keys.MetricKeys.AUC_PR: 0.1668,
+      }
+    else:
+      # Multi classes: unweighted_loss = 1 * -log ( soft_max(logits)[label] )
+      logits = age_weight * np.reshape(age, (2, 1)) + bias
+      logits_exp = np.exp(logits)
+      softmax_row_0 = logits_exp[0] / logits_exp[0].sum()
+      softmax_row_1 = logits_exp[1] / logits_exp[1].sum()
+      expected_loss_0 = -1 * math.log(softmax_row_0[label[0]])
+      expected_loss_1 = -1 * math.log(softmax_row_1[label[1]])
+      loss_mean = np.average([expected_loss_0, expected_loss_1],
+                             weights=weights)
+      expected_loss = loss_mean * np.sum(weights)
+
+      expected_metrics = {
+          metric_keys.MetricKeys.LOSS: expected_loss,
+          metric_keys.MetricKeys.LOSS_MEAN: loss_mean,
+          tf.compat.v1.GraphKeys.GLOBAL_STEP: 100,
+          metric_keys.MetricKeys.ACCURACY: 0.,
+      }
+
+    self.assertAllClose(
+        sorted_key_dict(expected_metrics),
+        sorted_key_dict(eval_metrics),
+        rtol=1e-3)
+
+  def test_binary_classes_evaluation_weights(self):
+    self._test_evaluation_weights(n_classes=2)
+
+  def test_multi_classes_evaluation_weights(self):
+    self._test_evaluation_weights(n_classes=4)
+
+
+class BaseLinearClassifierPredictTest(object):
+
+  def __init__(self, linear_classifier_fn, fc_lib=feature_column):
+    self._linear_classifier_fn = linear_classifier_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      shutil.rmtree(self._model_dir)
+
+  def _testPredictions(self, n_classes, label_vocabulary, label_output_fn):
+    """Tests predict when all variables are one-dimensional."""
+    age = 1.
+
+    # For binary case, the expected weight has shape (1,1). For multi class
+    # case, the shape is (1, n_classes). In order to test the weights, set
+    # weights as -11.0 * range(n_classes).
+    age_weight = [[-11.0]] if n_classes == 2 else (np.reshape(
+        -11.0 * np.array(list(range(n_classes)), dtype=np.float32),
+        (1, n_classes)))
+    bias = [10.0] if n_classes == 2 else [10.0] * n_classes
+
+    with tf.Graph().as_default():
+      tf.Variable(age_weight, name=AGE_WEIGHT_NAME)
+      tf.Variable(bias, name=BIAS_NAME)
+      tf.Variable(100, name='global_step', dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    est = self._linear_classifier_fn(
+        feature_columns=(self._fc_lib.numeric_column('age'),),
+        label_vocabulary=label_vocabulary,
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+
+    predict_input_fn = numpy_io.numpy_input_fn(
+        x={'age': np.array([[age]])},
+        y=None,
+        batch_size=1,
+        num_epochs=1,
+        shuffle=False)
+    predictions = list(est.predict(input_fn=predict_input_fn))
+
+    if n_classes == 2:
+      scalar_logits = np.reshape(
+          np.array(age_weight) * age + bias, (1,)).item()
+      two_classes_logits = [0, scalar_logits]
+      two_classes_logits_exp = np.exp(two_classes_logits)
+      softmax = two_classes_logits_exp / two_classes_logits_exp.sum()
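+      # Softmax over [0, logit] equals [1 - sigmoid(logit), sigmoid(logit)],
+      # which is how the binary head reports class probabilities.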
+
+      expected_predictions = {
+          'class_ids': [0],
+          'all_class_ids': [0, 1],
+          'classes': [label_output_fn(0)],
+          'all_classes': [label_output_fn(0),
+                          label_output_fn(1)],
+          'logistic': [sigmoid(np.array(scalar_logits))],
+          'logits': [scalar_logits],
+          'probabilities': softmax,
+      }
+    else:
+      onedim_logits = np.reshape(np.array(age_weight) * age + bias, (-1,))
+      class_ids = onedim_logits.argmax()
+      all_class_ids = list(range(len(onedim_logits)))
+      logits_exp = np.exp(onedim_logits)
+      softmax = logits_exp / logits_exp.sum()
+      expected_predictions = {
+          'class_ids': [class_ids],
+          'all_class_ids': all_class_ids,
+          'classes': [label_output_fn(class_ids)],
+          'all_classes': [label_output_fn(i) for i in all_class_ids],
+          'logits': onedim_logits,
+          'probabilities': softmax,
+      }
+
+    self.assertEqual(1, len(predictions))
+    # assertAllClose cannot handle byte type.
+    self.assertEqual(expected_predictions['classes'], predictions[0]['classes'])
+    expected_predictions.pop('classes')
+    predictions[0].pop('classes')
+    self.assertAllEqual(expected_predictions['all_classes'],
+                        predictions[0]['all_classes'])
+    expected_predictions.pop('all_classes')
+    predictions[0].pop('all_classes')
+    self.assertAllClose(
+        sorted_key_dict(expected_predictions), sorted_key_dict(predictions[0]))
+
+  def testBinaryClassesWithoutLabelVocabulary(self):
+    n_classes = 2
+    self._testPredictions(
+        n_classes,
+        label_vocabulary=None,
+        label_output_fn=lambda x: ('%s' % x).encode())
+
+  def testBinaryClassesWithLabelVocabulary(self):
+    n_classes = 2
+    self._testPredictions(
+        n_classes,
+        label_vocabulary=['class_vocab_{}'.format(i) for i in range(n_classes)],
+        label_output_fn=lambda x: ('class_vocab_%s' % x).encode())
+
+  def testMultiClassesWithoutLabelVocabulary(self):
+    n_classes = 4
+    self._testPredictions(
+        n_classes,
+        label_vocabulary=None,
+        label_output_fn=lambda x: ('%s' % x).encode())
+
+  def testMultiClassesWithLabelVocabulary(self):
+    n_classes = 4
+    self._testPredictions(
+        n_classes,
+        label_vocabulary=['class_vocab_{}'.format(i) for i in range(n_classes)],
+        label_output_fn=lambda x: ('class_vocab_%s' % x).encode())
+
+  def testSparseCombiner(self):
+    w_a = 2.0
+    w_b = 3.0
+    w_c = 5.0
+    bias = 5.0
+    with tf.Graph().as_default():
+      tf.Variable([[w_a], [w_b], [w_c]], name=LANGUAGE_WEIGHT_NAME)
+      tf.Variable([bias], name=BIAS_NAME)
+      tf.Variable(
+          1, name=tf.compat.v1.GraphKeys.GLOBAL_STEP, dtype=tf.dtypes.int64)
+      save_variables_to_ckpt(self._model_dir)
+
+    def _input_fn():
+      return tf.compat.v1.data.Dataset.from_tensors({
+          'language':
+              tf.sparse.SparseTensor(
+                  values=['a', 'c', 'b', 'c'],
+                  indices=[[0, 0], [0, 1], [1, 0], [1, 1]],
+                  dense_shape=[2, 2]),
+      })
+
+    feature_columns = (self._fc_lib.categorical_column_with_vocabulary_list(
+        'language', vocabulary_list=['a', 'b', 'c']),)
+
+    # Check prediction for each sparse_combiner.
+    # With sparse_combiner = 'sum', we have
+    # logits_1 = w_a + w_c + bias
+    #          = 2.0 + 5.0 + 5.0 = 12.0
+    # logits_2 = w_b + w_c + bias
+    #          = 3.0 + 5.0 + 5.0 = 13.0
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=feature_columns, model_dir=self._model_dir)
+    predictions = linear_classifier.predict(input_fn=_input_fn)
+    predicted_scores = [x['logits'] for x in predictions]
+    self.assertAllClose([[12.0], [13.0]], predicted_scores)
+
+    # With sparse_combiner = 'mean', we have
+    # logits_1 = 1/2 * (w_a + w_c) + bias
+    #          = 1/2 * (2.0 + 5.0) + 5.0 = 8.5
+    # logits_2 = 1/2 * (w_b + w_c) + bias
+    #          = 1/2 * (3.0 + 5.0) + 5.0 = 9.0
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=feature_columns,
+        model_dir=self._model_dir,
+        sparse_combiner='mean')
+    predictions = linear_classifier.predict(input_fn=_input_fn)
+    predicted_scores = [x['logits'] for x in predictions]
+    self.assertAllClose([[8.5], [9.0]], predicted_scores)
+
+    # With sparse_combiner = 'sqrtn', we have
+    # logits_1 = sqrt(2)/2 * (w_a + w_c) + bias
+    #          = sqrt(2)/2 * (2.0 + 5.0) + 5.0 = 9.94974
+    # logits_2 = sqrt(2)/2 * (w_b + w_c) + bias
+    #          = sqrt(2)/2 * (3.0 + 5.0) + 5.0 = 10.65685
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=feature_columns,
+        model_dir=self._model_dir,
+        sparse_combiner='sqrtn')
+    predictions = linear_classifier.predict(input_fn=_input_fn)
+    predicted_scores = [x['logits'] for x in predictions]
+    self.assertAllClose([[9.94974], [10.65685]], predicted_scores)
+
+
+class BaseLinearClassifierIntegrationTest(object):
+
+  def __init__(self, linear_classifier_fn, fc_lib=feature_column):
+    self._linear_classifier_fn = linear_classifier_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    self._model_dir = tempfile.mkdtemp()
+
+  def tearDown(self):
+    if self._model_dir:
+      shutil.rmtree(self._model_dir)
+
+  def _test_complete_flow(self, n_classes, train_input_fn, eval_input_fn,
+                          predict_input_fn, input_dimension, prediction_length):
+    feature_columns = [
+        self._fc_lib.numeric_column('x', shape=(input_dimension,))
+    ]
+    est = self._linear_classifier_fn(
+        feature_columns=feature_columns,
+        n_classes=n_classes,
+        model_dir=self._model_dir)
+
+    # TRAIN
+    # learn y = x
+    est.train(train_input_fn, steps=200)
+
+    # EVALUATE
+    scores = est.evaluate(eval_input_fn)
+    self.assertEqual(200, scores[tf.compat.v1.GraphKeys.GLOBAL_STEP])
+    self.assertIn(metric_keys.MetricKeys.LOSS, six.iterkeys(scores))
+
+    # PREDICT
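+    # Each classifier prediction carries a single byte-string class label,
+    # so the stacked output has shape (prediction_length, 1).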
+    predictions = np.array(
+        [x['classes'] for x in est.predict(predict_input_fn)])
+    self.assertAllEqual((prediction_length, 1), predictions.shape)
+
+    # EXPORT
+    feature_spec = tf.compat.v1.feature_column.make_parse_example_spec(
+        feature_columns)
+    serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
+        feature_spec)
+    export_dir = est.export_saved_model(tempfile.mkdtemp(),
+                                        serving_input_receiver_fn)
+    self.assertTrue(tf.compat.v1.gfile.Exists(export_dir))
+
+  def _test_numpy_input_fn(self, n_classes):
+    """Tests complete flow with numpy_input_fn."""
+    input_dimension = 4
+    batch_size = 10
+    prediction_length = batch_size
+    data = np.linspace(0., 2., batch_size * input_dimension, dtype=np.float32)
+    data = data.reshape(batch_size, input_dimension)
+    target = np.array([1] * batch_size)
+
+    train_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=target,
+        batch_size=batch_size,
+        num_epochs=None,
+        shuffle=True)
+    eval_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=target,
+        batch_size=batch_size,
+        num_epochs=1,
+        shuffle=False)
+    predict_input_fn = numpy_io.numpy_input_fn(
+        x={'x': data},
+        y=None,
+        batch_size=batch_size,
+        num_epochs=1,
+        shuffle=False)
+
+    self._test_complete_flow(
+        n_classes=n_classes,
+        train_input_fn=train_input_fn,
+        eval_input_fn=eval_input_fn,
+        predict_input_fn=predict_input_fn,
+        input_dimension=input_dimension,
+        prediction_length=prediction_length)
+
+  def test_binary_classes_numpy_input_fn(self):
+    self._test_numpy_input_fn(n_classes=2)
+
+  def test_multi_classes_numpy_input_fn(self):
+    self._test_numpy_input_fn(n_classes=4)
+
+  def _test_pandas_input_fn(self, n_classes):
+    """Tests complete flow with pandas_input_fn."""
+    if not HAS_PANDAS:
+      return
+
+    # Pandas DataFrame naturally supports only 1-dim data.
+    input_dimension = 1
+    batch_size = 10
+    data = np.array([1., 2., 3., 4.], dtype=np.float32)
+    target = np.array([1, 0, 1, 0], dtype=np.int32)
+    x = pd.DataFrame({'x': data})
+    y = pd.Series(target)
+    prediction_length = 4
+
+    train_input_fn = pandas_io.pandas_input_fn(
+        x=x, y=y, batch_size=batch_size, num_epochs=None, shuffle=True)
+    eval_input_fn = pandas_io.pandas_input_fn(
+        x=x, y=y, batch_size=batch_size, shuffle=False)
+    predict_input_fn = pandas_io.pandas_input_fn(
+        x=x, batch_size=batch_size, shuffle=False)
+
+    self._test_complete_flow(
+        n_classes=n_classes,
+        train_input_fn=train_input_fn,
+        eval_input_fn=eval_input_fn,
+        predict_input_fn=predict_input_fn,
+        input_dimension=input_dimension,
+        prediction_length=prediction_length)
+
+  def test_binary_classes_pandas_input_fn(self):
+    self._test_pandas_input_fn(n_classes=2)
+
+  def test_multi_classes_pandas_input_fn(self):
+    self._test_pandas_input_fn(n_classes=4)
+
+  def _test_input_fn_from_parse_example(self, n_classes):
+    """Tests complete flow with input_fn constructed from parse_example."""
+    input_dimension = 2
+    batch_size = 10
+    prediction_length = batch_size
+    data = np.linspace(0., 2., batch_size * input_dimension, dtype=np.float32)
+    data = data.reshape(batch_size, input_dimension)
+    target = np.array([1] * batch_size, dtype=np.int64)
+
+    serialized_examples = []
+    for x, y in zip(data, target):
+      example = example_pb2.Example(
+          features=feature_pb2.Features(
+              feature={
+                  'x':
+                      feature_pb2.Feature(
+                          float_list=feature_pb2.FloatList(value=x)),
+                  'y':
+                      feature_pb2.Feature(
+                          int64_list=feature_pb2.Int64List(value=[y])),
+              }))
+      serialized_examples.append(example.SerializeToString())
+
+    feature_spec = {
+        'x': tf.io.FixedLenFeature([input_dimension], tf.dtypes.float32),
+        'y': tf.io.FixedLenFeature([1], tf.dtypes.int64),
+    }
+
+    def _train_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(serialized_examples,
+                                                  feature_spec)
+      features = queue_parsed_features(feature_map)
+      labels = features.pop('y')
+      return features, labels
+
+    def _eval_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(
+          tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
+          feature_spec)
+      features = queue_parsed_features(feature_map)
+      labels = features.pop('y')
+      return features, labels
+
+    def _predict_input_fn():
+      feature_map = tf.compat.v1.io.parse_example(
+          tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
+          feature_spec)
+      features = queue_parsed_features(feature_map)
+      features.pop('y')
+      return features, None
+
+    self._test_complete_flow(
+        n_classes=n_classes,
+        train_input_fn=_train_input_fn,
+        eval_input_fn=_eval_input_fn,
+        predict_input_fn=_predict_input_fn,
+        input_dimension=input_dimension,
+        prediction_length=prediction_length)
+
+  def test_binary_classes_input_fn_from_parse_example(self):
+    self._test_input_fn_from_parse_example(n_classes=2)
+
+  def test_multi_classes_input_fn_from_parse_example(self):
+    self._test_input_fn_from_parse_example(n_classes=4)
+
+
+class BaseLinearLogitFnTest(object):
+
+  def __init__(self, fc_lib=feature_column):
+    self._fc_lib = fc_lib
+
+  def test_basic_logit_correctness(self):
+    """linear_logit_fn simply wraps feature_column_lib.linear_model."""
+    age = self._fc_lib.numeric_column('age')
+    with tf.Graph().as_default():
+      logit_fn = linear.linear_logit_fn_builder(units=2, feature_columns=[age])
+      logits = logit_fn(features={'age': [[23.], [31.]]})
+      bias_var = tf.compat.v1.get_collection(
+          tf.compat.v1.GraphKeys.GLOBAL_VARIABLES,
+          'linear_model/bias_weights')[0]
+      age_var = tf.compat.v1.get_collection(
+          tf.compat.v1.GraphKeys.GLOBAL_VARIABLES, 'linear_model/age')[0]
+      with tf.compat.v1.Session() as sess:
+        sess.run([tf.compat.v1.initializers.global_variables()])
+        self.assertAllClose([[0., 0.], [0., 0.]], logits.eval())
+        sess.run(bias_var.assign([10., 5.]))
+        self.assertAllClose([[10., 5.], [10., 5.]], logits.eval())
+        sess.run(age_var.assign([[2.0, 3.0]]))
+        # [2 * 23 + 10, 3 * 23 + 5] = [56, 74].
+        # [2 * 31 + 10, 3 * 31 + 5] = [72, 98].
+        self.assertAllClose([[56., 74.], [72., 98.]], logits.eval())
+
+  def test_compute_fraction_of_zero(self):
+    """Tests the calculation of sparsity."""
+    if self._fc_lib != feature_column:
+      return
+    age = tf.feature_column.numeric_column('age')
+    occupation = feature_column.categorical_column_with_hash_bucket(
+        'occupation', hash_bucket_size=5)
+    with tf.Graph().as_default():
+      cols_to_vars = {}
+      tf.compat.v1.feature_column.linear_model(
+          features={
+              'age': [[23.], [31.]],
+              'occupation': [['doctor'], ['engineer']]
+          },
+          feature_columns=[age, occupation],
+          units=3,
+          cols_to_vars=cols_to_vars)
+      cols_to_vars.pop('bias')
+      fraction_zero = linear._compute_fraction_of_zero(
+          list(cols_to_vars.values()))
+      age_var = tf.compat.v1.get_collection(
+          tf.compat.v1.GraphKeys.GLOBAL_VARIABLES, 'linear_model/age')[0]
+      with tf.compat.v1.Session() as sess:
+        sess.run([tf.compat.v1.initializers.global_variables()])
+        # Upon initialization, all variables will be zero.
+        self.assertAllClose(1, fraction_zero.eval())
+
+        sess.run(age_var.assign([[2.0, 0.0, -1.0]]))
+        # 1 of the 3 age weights is zero, and all 15 occupation weights
+        # (5 hash buckets x 3-dim output) are zero: 16 of 18 weights in total.
+        self.assertAllClose(16. / 18., fraction_zero.eval())
+
+  def test_compute_fraction_of_zero_v2(self):
+    """Tests the calculation of sparsity."""
+    if self._fc_lib != feature_column_v2:
+      return
+
+    age = tf.feature_column.numeric_column('age')
+    occupation = tf.feature_column.categorical_column_with_hash_bucket(
+        'occupation', hash_bucket_size=5)
+    with tf.Graph().as_default():
+      model = feature_column_v2.LinearModel(
+          feature_columns=[age, occupation], units=3, name='linear_model')
+      features = {
+          'age': [[23.], [31.]],
+          'occupation': [['doctor'], ['engineer']]
+      }
+      model(features)
+      variables = model.variables
+      variables.remove(model.bias)
+      fraction_zero = linear._compute_fraction_of_zero(variables)
+      age_var = tf.compat.v1.get_collection(
+          tf.compat.v1.GraphKeys.GLOBAL_VARIABLES, 'linear_model/age')[0]
+      with tf.compat.v1.Session() as sess:
+        sess.run([tf.compat.v1.initializers.global_variables()])
+        # Upon initialization, all variables will be zero.
+        self.assertAllClose(1, fraction_zero.eval())
+
+        sess.run(age_var.assign([[2.0, 0.0, -1.0]]))
+        # 1 of the 3 age weights is zero, and all 15 occupation weights
+        # (5 hash buckets x 3-dim output) are zero: 16 of 18 weights in total.
+        self.assertAllClose(16. / 18., fraction_zero.eval())
+
+
+class BaseLinearWarmStartingTest(object):
+
+  def __init__(self,
+               _linear_classifier_fn,
+               _linear_regressor_fn,
+               fc_lib=feature_column):
+    self._linear_classifier_fn = _linear_classifier_fn
+    self._linear_regressor_fn = _linear_regressor_fn
+    self._fc_lib = fc_lib
+
+  def setUp(self):
+    # Create a directory to save our old checkpoint and vocabularies to.
+    self._ckpt_and_vocab_dir = tempfile.mkdtemp()
+
+    # Make a dummy input_fn.
+    def _input_fn():
+      features = {
+          'age': [[23.], [31.]],
+          'age_in_years': [[23.], [31.]],
+          'occupation': [['doctor'], ['consultant']]
+      }
+      return features, [0, 1]
+
+    self._input_fn = _input_fn
+
+  def tearDown(self):
+    # Clean up checkpoint / vocab dir.
+    tf.compat.v1.summary.FileWriterCache.clear()
+    shutil.rmtree(self._ckpt_and_vocab_dir)
+
+  def test_classifier_basic_warm_starting(self):
+    """Tests correctness of LinearClassifier default warm-start."""
+    age = self._fc_lib.numeric_column('age')
+
+    # Create a LinearClassifier and train to save a checkpoint.
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=[age],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second LinearClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_linear_classifier = self._linear_classifier_fn(
+        feature_columns=[age],
+        n_classes=4,
+        optimizer=tf.compat.v1.train.GradientDescentOptimizer(
+            learning_rate=0.0),
+        warm_start_from=linear_classifier.model_dir)
+
+    warm_started_linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_linear_classifier.get_variable_names():
+      self.assertAllClose(
+          linear_classifier.get_variable_value(variable_name),
+          warm_started_linear_classifier.get_variable_value(variable_name))
+
+  def test_regressor_basic_warm_starting(self):
+    """Tests correctness of LinearRegressor default warm-start."""
+    age = self._fc_lib.numeric_column('age')
+
+    # Create a LinearRegressor and train to save a checkpoint.
+    linear_regressor = self._linear_regressor_fn(
+        feature_columns=[age],
+        model_dir=self._ckpt_and_vocab_dir,
+        optimizer='SGD')
+    linear_regressor.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second LinearRegressor, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_linear_regressor = self._linear_regressor_fn(
+        feature_columns=[age],
+        optimizer=tf.compat.v1.train.GradientDescentOptimizer(
+            learning_rate=0.0),
+        warm_start_from=linear_regressor.model_dir)
+
+    warm_started_linear_regressor.train(input_fn=self._input_fn, max_steps=1)
+    for variable_name in warm_started_linear_regressor.get_variable_names():
+      self.assertAllClose(
+          linear_regressor.get_variable_value(variable_name),
+          warm_started_linear_regressor.get_variable_value(variable_name))
+
+  def test_warm_starting_selective_variables(self):
+    """Tests selecting variables to warm-start."""
+    age = self._fc_lib.numeric_column('age')
+
+    # Create a LinearClassifier and train to save a checkpoint.
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=[age],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second LinearClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_linear_classifier = self._linear_classifier_fn(
+        feature_columns=[age],
+        n_classes=4,
+        optimizer=tf.compat.v1.train.GradientDescentOptimizer(
+            learning_rate=0.0),
+        # The provided regular expression will only warm-start the age variable
+        # and not the bias.
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=linear_classifier.model_dir,
+            vars_to_warm_start='.*(age).*'))
+
+    warm_started_linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+    self.assertAllClose(
+        linear_classifier.get_variable_value(AGE_WEIGHT_NAME),
+        warm_started_linear_classifier.get_variable_value(AGE_WEIGHT_NAME))
+    # Bias should still be zero from initialization.
+    self.assertAllClose(
+        [0.0] * 4, warm_started_linear_classifier.get_variable_value(BIAS_NAME))
+
+  def test_warm_starting_with_vocab_remapping_and_partitioning(self):
+    """Tests warm-starting with vocab remapping and partitioning."""
+    vocab_list = ['doctor', 'lawyer', 'consultant']
+    vocab_file = os.path.join(self._ckpt_and_vocab_dir, 'occupation_vocab')
+    with open(vocab_file, 'w') as f:
+      f.write('\n'.join(vocab_list))
+    occupation = self._fc_lib.categorical_column_with_vocabulary_file(
+        'occupation',
+        vocabulary_file=vocab_file,
+        vocabulary_size=len(vocab_list))
+
+    # Create a LinearClassifier and train to save a checkpoint.
+    partitioner = tf.compat.v1.fixed_size_partitioner(num_shards=2)
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=[occupation],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD',
+        partitioner=partitioner)
+    linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second LinearClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).  Use a new FeatureColumn with a
+    # different vocabulary for occupation.
+    new_vocab_list = ['doctor', 'consultant', 'engineer']
+    new_vocab_file = os.path.join(self._ckpt_and_vocab_dir,
+                                  'new_occupation_vocab')
+    with open(new_vocab_file, 'w') as f:
+      f.write('\n'.join(new_vocab_list))
+    new_occupation = self._fc_lib.categorical_column_with_vocabulary_file(
+        'occupation',
+        vocabulary_file=new_vocab_file,
+        vocabulary_size=len(new_vocab_list))
+    # We can create our VocabInfo object from the new and old occupation
+    # FeatureColumn's.
+    occupation_vocab_info = estimator.VocabInfo(
+        new_vocab=new_occupation.vocabulary_file,
+        new_vocab_size=new_occupation.vocabulary_size,
+        num_oov_buckets=new_occupation.num_oov_buckets,
+        old_vocab=occupation.vocabulary_file,
+        old_vocab_size=occupation.vocabulary_size,
+        # Can't use constant_initializer with load_and_remap, so emulate a
+        # constant with a degenerate random_uniform (minval == maxval).  In
+        # practice, use a truncated normal initializer.
+        backup_initializer=tf.compat.v1.initializers.random_uniform(
+            minval=0.39, maxval=0.39))
+    warm_started_linear_classifier = self._linear_classifier_fn(
+        feature_columns=[occupation],
+        n_classes=4,
+        optimizer=tf.compat.v1.train.GradientDescentOptimizer(
+            learning_rate=0.0),
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=linear_classifier.model_dir,
+            var_name_to_vocab_info={
+                OCCUPATION_WEIGHT_NAME: occupation_vocab_info
+            },
+            # Explicitly providing None here will only warm-start variables
+            # referenced in var_name_to_vocab_info (the bias will not be
+            # warm-started).
+            vars_to_warm_start=None),
+        partitioner=partitioner)
+
+    warm_started_linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+    # 'doctor' was ID-0 and still ID-0.
+    self.assertAllClose(
+        linear_classifier.get_variable_value(OCCUPATION_WEIGHT_NAME)[0, :],
+        warm_started_linear_classifier.get_variable_value(
+            OCCUPATION_WEIGHT_NAME)[0, :])
+    # 'consultant' was ID-2 and now ID-1.
+    self.assertAllClose(
+        linear_classifier.get_variable_value(OCCUPATION_WEIGHT_NAME)[2, :],
+        warm_started_linear_classifier.get_variable_value(
+            OCCUPATION_WEIGHT_NAME)[1, :])
+    # 'engineer' is a new entry and should be initialized with the
+    # backup_initializer in VocabInfo.
+    self.assertAllClose([0.39] * 4,
+                        warm_started_linear_classifier.get_variable_value(
+                            OCCUPATION_WEIGHT_NAME)[2, :])
+    # Bias should still be zero (from initialization logic).
+    self.assertAllClose(
+        [0.0] * 4, warm_started_linear_classifier.get_variable_value(BIAS_NAME))
+
+  def test_warm_starting_with_naming_change(self):
+    """Tests warm-starting with a Tensor name remapping."""
+    age_in_years = self._fc_lib.numeric_column('age_in_years')
+
+    # Create a LinearClassifier and train to save a checkpoint.
+    linear_classifier = self._linear_classifier_fn(
+        feature_columns=[age_in_years],
+        model_dir=self._ckpt_and_vocab_dir,
+        n_classes=4,
+        optimizer='SGD')
+    linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+
+    # Create a second LinearClassifier, warm-started from the first.  Use a
+    # learning_rate = 0.0 optimizer to check values (use SGD so we don't have
+    # accumulator values that change).
+    warm_started_linear_classifier = self._linear_classifier_fn(
+        feature_columns=[self._fc_lib.numeric_column('age')],
+        n_classes=4,
+        optimizer=tf.compat.v1.train.GradientDescentOptimizer(
+            learning_rate=0.0),
+        # The 'age' variable corresponds to the 'age_in_years' variable in the
+        # previous model.
+        warm_start_from=estimator.WarmStartSettings(
+            ckpt_to_initialize_from=linear_classifier.model_dir,
+            var_name_to_prev_var_name={
+                AGE_WEIGHT_NAME: AGE_WEIGHT_NAME.replace('age', 'age_in_years')
+            }))
+
+    warm_started_linear_classifier.train(input_fn=self._input_fn, max_steps=1)
+    self.assertAllClose(
+        linear_classifier.get_variable_value(
+            AGE_WEIGHT_NAME.replace('age', 'age_in_years')),
+        warm_started_linear_classifier.get_variable_value(AGE_WEIGHT_NAME))
+    # The bias is also warm-started (with no name remapping).
+    self.assertAllClose(
+        linear_classifier.get_variable_value(BIAS_NAME),
+        warm_started_linear_classifier.get_variable_value(BIAS_NAME))
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/early_stopping.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/early_stopping.py
new file mode 100644
index 00000000..47324933
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/early_stopping.py
@@ -0,0 +1,602 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Utilities for early stopping."""
+
+import collections
+import operator
+import os
+
+import tensorflow as tf
+from tensorflow.python.distribute import distribution_strategy_context
+from tensorflow.python.framework import ops
+from tensorflow.python.ops import array_ops
+from tensorflow.python.ops import init_ops
+from tensorflow.python.ops import variable_scope
+from tensorflow.python.platform import tf_logging
+from tensorflow.python.training import basic_session_run_hooks
+from tensorflow.python.training import session_run_hook
+from tensorflow.python.training import training_util
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator as estimator_lib
+
+
+_EVENT_FILE_GLOB_PATTERN = 'events.out.tfevents.*'
+
+
+@estimator_export('estimator.experimental.make_early_stopping_hook')
+def make_early_stopping_hook(estimator,
+                             should_stop_fn,
+                             run_every_secs=60,
+                             run_every_steps=None):
+  """Creates early-stopping hook.
+
+  Returns a `SessionRunHook` that stops training when `should_stop_fn` returns
+  `True`.
+
+  Usage example:
+
+  ```python
+  estimator = ...
+  hook = early_stopping.make_early_stopping_hook(
+      estimator, should_stop_fn=make_stop_fn(...))
+  train_spec = tf.estimator.TrainSpec(..., hooks=[hook])
+  tf.estimator.train_and_evaluate(estimator, train_spec, ...)
+  ```
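+
+  For illustration, a minimal sketch of a custom `should_stop_fn` (the flag
+  file path below is a hypothetical example, not part of this API):
+
+  ```python
+  import os
+
+  def should_stop_fn():
+    # Stop as soon as an external process creates this flag file.
+    return os.path.exists('/tmp/stop_training')
+
+  hook = early_stopping.make_early_stopping_hook(
+      estimator, should_stop_fn=should_stop_fn)
+  ```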
+
+  Caveat: Current implementation supports early-stopping both training and
+  evaluation in local mode. In distributed mode, training can be stopped but
+  evaluation (where it's a separate job) will indefinitely wait for new model
+  checkpoints to evaluate, so you will need other means to detect and stop it.
+  Early-stopping evaluation in distributed mode requires changes in
+  `train_and_evaluate` API and will be addressed in a future revision.
+
+  Args:
+    estimator: A `tf.estimator.Estimator` instance.
+    should_stop_fn: `callable`, function that takes no arguments and returns a
+      `bool`. If the function returns `True`, stopping will be initiated by the
+      chief.
+    run_every_secs: If specified, calls `should_stop_fn` at an interval of
+      `run_every_secs` seconds. Defaults to 60 seconds. Either this or
+      `run_every_steps` must be set.
+    run_every_steps: If specified, calls `should_stop_fn` every
+      `run_every_steps` steps. Either this or `run_every_secs` must be set.
+
+  Returns:
+    A `SessionRunHook` that periodically executes `should_stop_fn` and initiates
+    early stopping if the function returns `True`.
+
+  Raises:
+    TypeError: If `estimator` is not of type `tf.estimator.Estimator`.
+    ValueError: If both `run_every_secs` and `run_every_steps` are set.
+  """
+  if not isinstance(estimator, estimator_lib.Estimator):
+    raise TypeError('`estimator` must have type `tf.estimator.Estimator`. '
+                    'Got: {}'.format(type(estimator)))
+
+  if run_every_secs is not None and run_every_steps is not None:
+    raise ValueError('Only one of `run_every_secs` and `run_every_steps` can '
+                     'be set.')
+
+  train_distribute = estimator.config.train_distribute
+  mwms = ['CollectiveAllReduceStrategy', 'MultiWorkerMirroredStrategy']
+  if train_distribute and any(
+      train_distribute.__class__.__name__.startswith(strategy)
+      for strategy in mwms):
+    if run_every_secs:
+      raise ValueError('run_every_secs should not be set when using '
+                       'MultiWorkerMirroredStrategy.')
+    return _MultiWorkerEarlyStoppingHook(should_stop_fn, run_every_steps)
+
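+  # Outside multi-worker strategies, only the chief evaluates
+  # `should_stop_fn`; other workers poll the shared stop variable (see
+  # `_CheckForStoppingHook`) and stop once the chief has set it.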
+  if estimator.config.is_chief:
+    return _StopOnPredicateHook(should_stop_fn, run_every_secs, run_every_steps)
+  else:
+    return _CheckForStoppingHook()
+
+
+@estimator_export('estimator.experimental.stop_if_higher_hook')
+def stop_if_higher_hook(estimator,
+                        metric_name,
+                        threshold,
+                        eval_dir=None,
+                        min_steps=0,
+                        run_every_secs=60,
+                        run_every_steps=None):
+  """Creates hook to stop if the given metric is higher than the threshold.
+
+  Usage example:
+
+  ```python
+  estimator = ...
+  # Hook to stop training if accuracy becomes higher than 0.9.
+  hook = early_stopping.stop_if_higher_hook(estimator, "accuracy", 0.9)
+  train_spec = tf.estimator.TrainSpec(..., hooks=[hook])
+  tf.estimator.train_and_evaluate(estimator, train_spec, ...)
+  ```
+
+  Caveat: Current implementation supports early-stopping both training and
+  evaluation in local mode. In distributed mode, training can be stopped but
+  evaluation (where it's a separate job) will indefinitely wait for new model
+  checkpoints to evaluate, so you will need other means to detect and stop it.
+  Early-stopping evaluation in distributed mode requires changes in
+  `train_and_evaluate` API and will be addressed in a future revision.
+
+  Args:
+    estimator: A `tf.estimator.Estimator` instance.
+    metric_name: `str`, metric to track. "loss", "accuracy", etc.
+    threshold: Numeric threshold for the given metric.
+    eval_dir: If set, directory containing summary files with eval metrics. By
+      default, `estimator.eval_dir()` will be used.
+    min_steps: `int`, stop is never requested if global step is less than this
+      value. Defaults to 0.
+    run_every_secs: If specified, calls `should_stop_fn` at an interval of
+      `run_every_secs` seconds. Defaults to 60 seconds. Either this or
+      `run_every_steps` must be set.
+    run_every_steps: If specified, calls `should_stop_fn` every
+      `run_every_steps` steps. Either this or `run_every_secs` must be set.
+
+  Returns:
+    An early-stopping hook of type `SessionRunHook` that periodically checks
+    if the given metric is higher than specified threshold and initiates
+    early stopping if true.
+  """
+  return _stop_if_threshold_crossed_hook(
+      estimator=estimator,
+      metric_name=metric_name,
+      threshold=threshold,
+      higher_is_better=True,
+      eval_dir=eval_dir,
+      min_steps=min_steps,
+      run_every_secs=run_every_secs,
+      run_every_steps=run_every_steps)
+
+
+@estimator_export('estimator.experimental.stop_if_lower_hook')
+def stop_if_lower_hook(estimator,
+                       metric_name,
+                       threshold,
+                       eval_dir=None,
+                       min_steps=0,
+                       run_every_secs=60,
+                       run_every_steps=None):
+  """Creates hook to stop if the given metric is lower than the threshold.
+
+  Usage example:
+
+  ```python
+  estimator = ...
+  # Hook to stop training if loss becomes lower than 100.
+  hook = early_stopping.stop_if_lower_hook(estimator, "loss", 100)
+  train_spec = tf.estimator.TrainSpec(..., hooks=[hook])
+  tf.estimator.train_and_evaluate(estimator, train_spec, ...)
+  ```
+
+  Caveat: Current implementation supports early-stopping both training and
+  evaluation in local mode. In distributed mode, training can be stopped but
+  evaluation (where it's a separate job) will indefinitely wait for new model
+  checkpoints to evaluate, so you will need other means to detect and stop it.
+  Early-stopping evaluation in distributed mode requires changes in
+  `train_and_evaluate` API and will be addressed in a future revision.
+
+  Args:
+    estimator: A `tf.estimator.Estimator` instance.
+    metric_name: `str`, metric to track. "loss", "accuracy", etc.
+    threshold: Numeric threshold for the given metric.
+    eval_dir: If set, directory containing summary files with eval metrics. By
+      default, `estimator.eval_dir()` will be used.
+    min_steps: `int`, stop is never requested if global step is less than this
+      value. Defaults to 0.
+    run_every_secs: If specified, calls `should_stop_fn` at an interval of
+      `run_every_secs` seconds. Defaults to 60 seconds. Either this or
+      `run_every_steps` must be set.
+    run_every_steps: If specified, calls `should_stop_fn` every
+      `run_every_steps` steps. Either this or `run_every_secs` must be set.
+
+  Returns:
+    An early-stopping hook of type `SessionRunHook` that periodically checks
+    if the given metric is lower than specified threshold and initiates
+    early stopping if true.
+  """
+  return _stop_if_threshold_crossed_hook(
+      estimator=estimator,
+      metric_name=metric_name,
+      threshold=threshold,
+      higher_is_better=False,
+      eval_dir=eval_dir,
+      min_steps=min_steps,
+      run_every_secs=run_every_secs,
+      run_every_steps=run_every_steps)
+
+
+@estimator_export('estimator.experimental.stop_if_no_increase_hook')
+def stop_if_no_increase_hook(estimator,
+                             metric_name,
+                             max_steps_without_increase,
+                             eval_dir=None,
+                             min_steps=0,
+                             run_every_secs=60,
+                             run_every_steps=None):
+  """Creates hook to stop if metric does not increase within given max steps.
+
+  Usage example:
+
+  ```python
+  estimator = ...
+  # Hook to stop training if accuracy does not increase in over 100000 steps.
+  hook = early_stopping.stop_if_no_increase_hook(estimator, "accuracy", 100000)
+  train_spec = tf.estimator.TrainSpec(..., hooks=[hook])
+  tf.estimator.train_and_evaluate(estimator, train_spec, ...)
+  ```
+
+  Caveat: Current implementation supports early-stopping both training and
+  evaluation in local mode. In distributed mode, training can be stopped but
+  evaluation (where it's a separate job) will indefinitely wait for new model
+  checkpoints to evaluate, so you will need other means to detect and stop it.
+  Early-stopping evaluation in distributed mode requires changes in
+  `train_and_evaluate` API and will be addressed in a future revision.
+
+  Args:
+    estimator: A `tf.estimator.Estimator` instance.
+    metric_name: `str`, metric to track. "loss", "accuracy", etc.
+    max_steps_without_increase: `int`, maximum number of training steps with no
+      increase in the given metric.
+    eval_dir: If set, directory containing summary files with eval metrics. By
+      default, `estimator.eval_dir()` will be used.
+    min_steps: `int`, stop is never requested if global step is less than this
+      value. Defaults to 0.
+    run_every_secs: If specified, calls `should_stop_fn` at an interval of
+      `run_every_secs` seconds. Defaults to 60 seconds. Either this or
+      `run_every_steps` must be set.
+    run_every_steps: If specified, calls `should_stop_fn` every
+      `run_every_steps` steps. Either this or `run_every_secs` must be set.
+
+  Returns:
+    An early-stopping hook of type `SessionRunHook` that periodically checks
+    if the given metric shows no increase over given maximum number of
+    training steps, and initiates early stopping if true.
+  """
+  return _stop_if_no_metric_improvement_hook(
+      estimator=estimator,
+      metric_name=metric_name,
+      max_steps_without_improvement=max_steps_without_increase,
+      higher_is_better=True,
+      eval_dir=eval_dir,
+      min_steps=min_steps,
+      run_every_secs=run_every_secs,
+      run_every_steps=run_every_steps)
+
+
+@estimator_export('estimator.experimental.stop_if_no_decrease_hook')
+def stop_if_no_decrease_hook(estimator,
+                             metric_name,
+                             max_steps_without_decrease,
+                             eval_dir=None,
+                             min_steps=0,
+                             run_every_secs=60,
+                             run_every_steps=None):
+  """Creates hook to stop if metric does not decrease within given max steps.
+
+  Usage example:
+
+  ```python
+  estimator = ...
+  # Hook to stop training if loss does not decrease in over 100000 steps.
+  hook = early_stopping.stop_if_no_decrease_hook(estimator, "loss", 100000)
+  train_spec = tf.estimator.TrainSpec(..., hooks=[hook])
+  tf.estimator.train_and_evaluate(estimator, train_spec, ...)
+  ```
+
+  Caveat: Current implementation supports early-stopping both training and
+  evaluation in local mode. In distributed mode, training can be stopped but
+  evaluation (where it's a separate job) will indefinitely wait for new model
+  checkpoints to evaluate, so you will need other means to detect and stop it.
+  Early-stopping evaluation in distributed mode requires changes in
+  `train_and_evaluate` API and will be addressed in a future revision.
+
+  Args:
+    estimator: A `tf.estimator.Estimator` instance.
+    metric_name: `str`, metric to track. "loss", "accuracy", etc.
+    max_steps_without_decrease: `int`, maximum number of training steps with no
+      decrease in the given metric.
+    eval_dir: If set, directory containing summary files with eval metrics. By
+      default, `estimator.eval_dir()` will be used.
+    min_steps: `int`, stop is never requested if global step is less than this
+      value. Defaults to 0.
+    run_every_secs: If specified, calls `should_stop_fn` at an interval of
+      `run_every_secs` seconds. Defaults to 60 seconds. Either this or
+      `run_every_steps` must be set.
+    run_every_steps: If specified, calls `should_stop_fn` every
+      `run_every_steps` steps. Either this or `run_every_secs` must be set.
+
+  Returns:
+    An early-stopping hook of type `SessionRunHook` that periodically checks
+    if the given metric shows no decrease over given maximum number of
+    training steps, and initiates early stopping if true.
+  """
+  return _stop_if_no_metric_improvement_hook(
+      estimator=estimator,
+      metric_name=metric_name,
+      max_steps_without_improvement=max_steps_without_decrease,
+      higher_is_better=False,
+      eval_dir=eval_dir,
+      min_steps=min_steps,
+      run_every_secs=run_every_secs,
+      run_every_steps=run_every_steps)
+
+
+def read_eval_metrics(eval_dir):
+  """Helper to read eval metrics from eval summary files.
+
+  Args:
+    eval_dir: Directory containing summary files with eval metrics.
+
+  Returns:
+    A `dict` with global steps mapping to `dict` of metric names and values.
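+
+  Usage example (a minimal sketch; assumes `est` is a `tf.estimator.Estimator`
+  that has been evaluated at least once):
+
+  ```python
+  for step, metrics in read_eval_metrics(est.eval_dir()).items():
+    print('step %d: loss = %s' % (step, metrics.get('loss')))
+  ```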
+  """
+  eval_metrics_dict = collections.defaultdict(dict)
+  for event in _summaries(eval_dir):
+    if not event.HasField('summary'):
+      continue
+    metrics = {}
+    for value in event.summary.value:
+      if value.HasField('simple_value'):
+        metrics[value.tag] = value.simple_value
+    if metrics:
+      eval_metrics_dict[event.step].update(metrics)
+  return collections.OrderedDict(
+      sorted(eval_metrics_dict.items(), key=lambda t: t[0]))
+
+
+def _stop_if_threshold_crossed_hook(estimator, metric_name, threshold,
+                                    higher_is_better, eval_dir, min_steps,
+                                    run_every_secs, run_every_steps):
+  """Creates early-stopping hook to stop training if threshold is crossed."""
+
+  if eval_dir is None:
+    eval_dir = estimator.eval_dir()
+
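+  # `is_lhs_better(a, b)` is True when `a` beats `b` in the configured
+  # direction: operator.gt when higher is better, operator.lt otherwise.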
+  is_lhs_better = operator.gt if higher_is_better else operator.lt
+  greater_or_lesser = 'greater than' if higher_is_better else 'less than'
+
+  def stop_if_threshold_crossed_fn():
+    """Returns `True` if the given metric crosses specified threshold."""
+
+    eval_results = read_eval_metrics(eval_dir)
+
+    for step, metrics in eval_results.items():
+      if step < min_steps:
+        continue
+      val = metrics[metric_name]
+      if is_lhs_better(val, threshold):
+        tf.compat.v1.logging.info(
+            'At step %s, metric "%s" has value %s which is %s the configured '
+            'threshold (%s) for early stopping.', step, metric_name, val,
+            greater_or_lesser, threshold)
+        return True
+    return False
+
+  return make_early_stopping_hook(
+      estimator=estimator,
+      should_stop_fn=stop_if_threshold_crossed_fn,
+      run_every_secs=run_every_secs,
+      run_every_steps=run_every_steps)
+
+
+def _stop_if_no_metric_improvement_hook(estimator, metric_name,
+                                        max_steps_without_improvement,
+                                        higher_is_better, eval_dir, min_steps,
+                                        run_every_secs, run_every_steps):
+  """Returns hook to stop training if given metric shows no improvement."""
+
+  if eval_dir is None:
+    eval_dir = estimator.eval_dir()
+
+  is_lhs_better = operator.gt if higher_is_better else operator.lt
+  increase_or_decrease = 'increase' if higher_is_better else 'decrease'
+
+  def stop_if_no_metric_improvement_fn():
+    """Returns `True` if metric does not improve within max steps."""
+
+    eval_results = read_eval_metrics(eval_dir)
+
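+    # Track the best metric value seen so far and the step it occurred at;
+    # stop once the current step is at least `max_steps_without_improvement`
+    # past that step.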
+    best_val = None
+    best_val_step = None
+    for step, metrics in eval_results.items():
+      if step < min_steps:
+        continue
+      val = metrics[metric_name]
+      if best_val is None or is_lhs_better(val, best_val):
+        best_val = val
+        best_val_step = step
+      if step - best_val_step >= max_steps_without_improvement:
+        tf.compat.v1.logging.info(
+            'No %s in metric "%s" for %s steps, which is greater than or equal '
+            'to max steps (%s) configured for early stopping.',
+            increase_or_decrease, metric_name, step - best_val_step,
+            max_steps_without_improvement)
+        return True
+    return False
+
+  return make_early_stopping_hook(
+      estimator=estimator,
+      should_stop_fn=stop_if_no_metric_improvement_fn,
+      run_every_secs=run_every_secs,
+      run_every_steps=run_every_steps)
+
+
+def _summaries(eval_dir):
+  """Yields `tensorflow.Event` protos from event files in the eval dir.
+
+  Args:
+    eval_dir: Directory containing summary files with eval metrics.
+
+  Yields:
+    `tensorflow.Event` object read from the event files.
+  """
+  if tf.compat.v1.gfile.Exists(eval_dir):
+    for event_file in tf.compat.v1.gfile.Glob(
+        os.path.join(eval_dir, _EVENT_FILE_GLOB_PATTERN)):
+      try:
+        for event in tf.compat.v1.train.summary_iterator(event_file):
+          tf.compat.v1.logging.info('Yielding event: %s', event)
+          yield event
+      except tf.errors.DataLossError as e:
+        # Upon DataLossError, we ignore the rest of the file and go to the next
+        # one.
+        tf.compat.v1.logging.warning(
+            'Ignoring data corruption error encountered while reading file: '
+            '%s; original error raised by `tf.train.summary_iterator`: %s',
+            event_file, e)
+
+
+def _get_or_create_stop_var():
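+  """Returns a shared boolean `STOP` variable used to signal early stopping.
+
+  The variable is created under the `signal_early_stopping` scope with
+  `AUTO_REUSE`, so the chief's `_StopOnPredicateHook` and the workers'
+  `_CheckForStoppingHook` resolve to the same underlying variable.
+  """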
+  with tf.compat.v1.variable_scope(
+      name_or_scope='signal_early_stopping',
+      values=[],
+      reuse=tf.compat.v1.AUTO_REUSE):
+    return tf.compat.v1.get_variable(
+        name='STOP',
+        shape=[],
+        dtype=tf.dtypes.bool,
+        initializer=tf.compat.v1.initializers.constant(False),
+        collections=[tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
+        trainable=False)
+
+
+class _StopOnPredicateHook(tf.compat.v1.train.SessionRunHook):
+  """Hook that requests stop when `should_stop_fn` returns `True`."""
+
+  def __init__(self, should_stop_fn, run_every_secs=60, run_every_steps=None):
+    if not callable(should_stop_fn):
+      raise TypeError('`should_stop_fn` must be callable.')
+
+    self._should_stop_fn = should_stop_fn
+    self._timer = tf.compat.v1.train.SecondOrStepTimer(
+        every_secs=run_every_secs, every_steps=run_every_steps)
+    self._global_step_tensor = None
+    self._stop_var = None
+    self._stop_op = None
+
+  def begin(self):
+    self._global_step_tensor = tf.compat.v1.train.get_global_step()
+    self._stop_var = _get_or_create_stop_var()
+    self._stop_op = tf.compat.v1.assign(self._stop_var, True)
+
+  def before_run(self, run_context):
+    del run_context
+    return tf.compat.v1.train.SessionRunArgs(self._global_step_tensor)
+
+  def after_run(self, run_context, run_values):
+    global_step = run_values.results
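+    # The timer throttles `should_stop_fn` so it is invoked at most once per
+    # configured interval (`run_every_secs`/`run_every_steps`), not every step.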
+    if self._timer.should_trigger_for_step(global_step):
+      self._timer.update_last_triggered_step(global_step)
+      if self._should_stop_fn():
+        tf.compat.v1.logging.info('Requesting early stopping at global step %d',
+                                  global_step)
+        run_context.session.run(self._stop_op)
+        run_context.request_stop()
+
+
+class _CheckForStoppingHook(tf.compat.v1.train.SessionRunHook):
+  """Hook that requests stop if stop is requested by `_StopOnPredicateHook`."""
+
+  def __init__(self):
+    self._stop_var = None
+
+  def begin(self):
+    self._stop_var = _get_or_create_stop_var()
+
+  def before_run(self, run_context):
+    del run_context
+    return tf.compat.v1.train.SessionRunArgs(self._stop_var)
+
+  def after_run(self, run_context, run_values):
+    should_early_stop = run_values.results
+    if should_early_stop:
+      tf.compat.v1.logging.info('Early stopping requested, suspending run.')
+      run_context.request_stop()
+
+
+class _MultiWorkerEarlyStoppingHook(session_run_hook.SessionRunHook):
+  """Hook that requests stop when `should_stop_fn` returns `True`."""
+
+  def _get_or_create_stop_var_with_aggregation(self):
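+    """Returns an int32 stop flag aggregated with SUM across replicas.
+
+    With `VariableAggregation.SUM`, any single replica assigning a nonzero
+    value is enough to make the combined flag positive on every worker.
+    """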
+    with variable_scope.variable_scope(
+        name_or_scope='signal_early_stopping',
+        values=[],
+        reuse=variable_scope.AUTO_REUSE):
+      return variable_scope.get_variable(
+          name='STOP',
+          shape=[],
+          dtype=tf.dtypes.int32,
+          initializer=init_ops.constant_initializer(0),
+          collections=[ops.GraphKeys.GLOBAL_VARIABLES],
+          synchronization=variable_scope.VariableSynchronization.ON_WRITE,
+          aggregation=variable_scope.VariableAggregation.SUM,
+          trainable=False)
+
+  def __init__(self, should_stop_fn, run_every_steps=None):
+    if not callable(should_stop_fn):
+      raise TypeError('`should_stop_fn` must be callable.')
+
+    self._should_stop_fn = should_stop_fn
+    self._timer = basic_session_run_hooks.SecondOrStepTimer(
+        every_secs=None, every_steps=run_every_steps)
+    self._global_step_tensor = None
+    self._stop_var = None
+    self._stop_op = None
+    self._non_stop_op = None
+
+  def begin(self):
+    self._global_step_tensor = training_util.get_global_step()
+    self._stop_var = self._get_or_create_stop_var_with_aggregation()
+    assert distribution_strategy_context.in_cross_replica_context()
+
+    strategy = distribution_strategy_context.get_strategy()
+    self._stop_placeholder = None
+
+    def stop_op_fn(var):
+      placeholder = array_ops.placeholder_with_default(
+          0, tuple(), name='stop_value')
+      if self._stop_placeholder is None:
+        self._stop_placeholder = placeholder
+      return var.assign_add(placeholder)
+
+    self._stop_op = strategy.run(
+        stop_op_fn, args=(self._stop_var,))
+
+  def before_run(self, run_context):
+    del run_context
+    return session_run_hook.SessionRunArgs({
+        'global_step': self._global_step_tensor,
+        'stop_var': self._stop_var
+    })
+
+  def after_run(self, run_context, run_values):
+    global_step = run_values.results['global_step']
+    should_early_stop = run_values.results['stop_var']
+
+    if should_early_stop > 0:
+      tf_logging.info('Early stopping requested, suspending run.')
+      run_context.request_stop()
+      return
+    if self._timer.should_trigger_for_step(global_step):
+      self._timer.update_last_triggered_step(global_step)
+      if self._should_stop_fn():
+        run_context.session.run(
+            self._stop_op, feed_dict={self._stop_placeholder: 1})
+        tf_logging.info('Requesting early stopping at global step %d',
+                        global_step)
+      else:
+        run_context.session.run(
+            self._stop_op, feed_dict={self._stop_placeholder: 0})
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/estimator.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/estimator.py
new file mode 100644
index 00000000..78147724
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/estimator.py
@@ -0,0 +1,2389 @@
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Base Estimator class."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import copy
+import os
+import tempfile
+
+import numpy as np
+import six
+import tensorflow as tf
+from google.protobuf import message
+from tensorflow.core.framework import summary_pb2
+from tensorflow.python.distribute import estimator_training as distribute_coordinator_training
+from tensorflow.python.eager import context
+from tensorflow.python.eager import monitoring
+from tensorflow.python.framework import ops
+from tensorflow.python.platform import tf_logging as logging
+from tensorflow.python.saved_model import utils_impl as saved_model_utils
+from tensorflow.python.summary import summary
+from tensorflow.python.training import basic_session_run_hooks
+from tensorflow.python.training import checkpoint_management
+from tensorflow.python.training import device_setter
+from tensorflow.python.training import evaluation
+from tensorflow.python.training import training
+from tensorflow.python.training import training_util
+from tensorflow.python.training.tracking import graph_view
+from tensorflow.python.training.tracking import util as trackable_util
+from tensorflow.python.util import compat_internal
+from tensorflow.python.util import deprecation
+from tensorflow.python.util import function_utils
+from tensorflow.python.util import tf_contextlib
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import model_fn as model_fn_lib
+from tensorflow_estimator.python.estimator import run_config
+from tensorflow_estimator.python.estimator import util as estimator_util
+from tensorflow_estimator.python.estimator.export import export_lib
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+_VALID_MODEL_FN_ARGS = set(
+    ['features', 'labels', 'mode', 'params', 'self', 'config'])
+_estimator_api_gauge = monitoring.BoolGauge('/tensorflow/api/estimator',
+                                            'estimator api usage', 'method')
+
+_canned_estimator_api_gauge = monitoring.StringGauge(
+    '/tensorflow/api/estimator/canned_estimator',
+    'Gauge to track the type of canned estimator used', 'ClassType')
+
+
+@estimator_export(v1=['estimator.Estimator'])
+class Estimator(object):
+  """Estimator class to train and evaluate TensorFlow models.
+
+  The `Estimator` object wraps a model which is specified by a `model_fn`,
+  which, given inputs and a number of other parameters, returns the ops
+  necessary to perform training, evaluation, or predictions.
+
+  All outputs (checkpoints, event files, etc.) are written to `model_dir`, or a
+  subdirectory thereof. If `model_dir` is not set, a temporary directory is
+  used.
+
+  The `config` argument can be passed a `tf.estimator.RunConfig` object that
+  contains information about the execution environment. It is passed on to the
+  `model_fn`, if the `model_fn` has a parameter named "config" (and input
+  functions in the same manner). If the `config` parameter is not passed, it is
+  instantiated by the `Estimator`. Not passing config means that defaults useful
+  for local execution are used. `Estimator` makes config available to the model
+  (for instance, to allow specialization based on the number of workers
+  available), and also uses some of its fields to control internals, especially
+  regarding checkpointing.
+
+  The `params` argument contains hyperparameters. It is passed to the
+  `model_fn`, if the `model_fn` has a parameter named "params", and to the input
+  functions in the same manner. `Estimator` only passes params along, it does
+  not inspect it. The structure of `params` is therefore entirely up to the
+  developer.
+
+  None of `Estimator`'s methods can be overridden in subclasses (its
+  constructor enforces this). Subclasses should use `model_fn` to configure
+  the base class, and may add methods implementing specialized functionality.
+
+  See [estimators](https://tensorflow.org/guide/estimator) for more
+  information.
+
+  To warm-start an `Estimator`:
+
+  ```python
+  estimator = tf.estimator.DNNClassifier(
+      feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb],
+      hidden_units=[1024, 512, 256],
+      warm_start_from="/path/to/checkpoint/dir")
+  ```
+
+  For more details on warm-start configuration, see
+  `tf.estimator.WarmStartSettings`.
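+
+  A minimal sketch of a custom `model_fn` and `Estimator` construction (the
+  toy linear model below is illustrative only):
+
+  ```python
+  def model_fn(features, labels, mode):
+    w = tf.compat.v1.get_variable('w', [], dtype=tf.float32)
+    predictions = w * features['x']
+    if mode == tf.estimator.ModeKeys.PREDICT:
+      return tf.estimator.EstimatorSpec(mode, predictions=predictions)
+    loss = tf.reduce_mean(tf.square(predictions - labels))
+    train_op = tf.compat.v1.train.GradientDescentOptimizer(0.1).minimize(
+        loss, global_step=tf.compat.v1.train.get_global_step())
+    return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)
+
+  estimator = tf.estimator.Estimator(model_fn=model_fn)
+  ```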
+
+  @compatibility(eager)
+  Calling methods of `Estimator` will work while eager execution is enabled.
+  However, the `model_fn` and `input_fn` are not executed eagerly; `Estimator`
+  will switch to graph mode before calling all user-provided functions (incl.
+  hooks), so their code has to be compatible with graph mode execution. Note
+  that `input_fn` code using `tf.data` generally works in both graph and eager
+  modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               model_fn,
+               model_dir=None,
+               config=None,
+               params=None,
+               warm_start_from=None):
+    """Constructs an `Estimator` instance.
+
+    Args:
+      model_fn: Model function. Follows the signature:
+        * `features` -- This is the first item returned from the `input_fn`
+        passed to `train`, `evaluate`, and `predict`. This should be a
+        single `tf.Tensor` or `dict` of same.
+        * `labels` -- This is the second item returned from the `input_fn`
+        passed to `train`, `evaluate`, and `predict`. This should be a
+        single `tf.Tensor` or `dict` of same (for multi-head models). If
+        mode is `tf.estimator.ModeKeys.PREDICT`, `labels=None` will be
+        passed. If the `model_fn`'s signature does not accept `mode`, the
+        `model_fn` must still be able to handle `labels=None`.
+        * `mode` -- Optional. Specifies if this is training, evaluation or
+        prediction. See `tf.estimator.ModeKeys`.
+        * `params` -- Optional `dict` of hyperparameters.  Will receive what is
+        passed to Estimator in the `params` parameter. This allows configuring
+        Estimators from hyperparameter tuning.
+        * `config` -- Optional `estimator.RunConfig` object. Will receive what
+        is passed to Estimator as its `config` parameter, or a default
+        value. Allows setting up things in your `model_fn` based on
+        configuration such as `num_ps_replicas`, or `model_dir`.
+        * Returns -- `tf.estimator.EstimatorSpec`
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator to
+        continue training a previously saved model. If `PathLike` object, the
+        path will be resolved. If `None`, the model_dir in `config` will be used
+        if set. If both are set, they must be same. If both are `None`, a
+        temporary directory will be used.
+      config: `estimator.RunConfig` configuration object.
+      params: `dict` of hyper parameters that will be passed into `model_fn`.
+        Keys are names of parameters, values are basic python types.
+      warm_start_from: Optional string filepath to a checkpoint or SavedModel to
+        warm-start from, or a `tf.estimator.WarmStartSettings` object to fully
+        configure warm-starting.  If None, only TRAINABLE variables are
+        warm-started.  If the string filepath is provided instead of a
+        `tf.estimator.WarmStartSettings`, then all variables are warm-started,
+        and it is assumed that vocabularies and `tf.Tensor` names are unchanged.
+
+    Raises:
+      ValueError: parameters of `model_fn` don't match `params`.
+      ValueError: if this is called via a subclass and if that class overrides
+        a member of `Estimator`.
+    """
+    _estimator_api_gauge.get_cell('init').set(True)
+    # We do not endorse Estimator child classes to override methods in
+    # Estimator, other than a select few. You're on your own if you cleverly
+    # override the method "_assert_members_are_not_overridden".
+    self.__class__._assert_members_are_not_overridden(self)  # pylint: disable=protected-access
+
+    self._config = maybe_overwrite_model_dir_and_session_config(
+        config, model_dir)
+
+    # The distribute field contains an instance of tf.distribute.Strategy.
+    self._train_distribution = self._config.train_distribute
+    self._eval_distribution = self._config.eval_distribute
+    # Model directory.
+    self._model_dir = self._config.model_dir
+    self._session_config = self._config.session_config
+    tf.compat.v1.logging.info('Using config: %s', str(vars(self._config)))
+
+    self._device_fn = (
+        self._config.device_fn or _get_replica_device_setter(self._config))
+
+    if model_fn is None:
+      raise ValueError('model_fn must be provided to Estimator.')
+    model_fn_lib.verify_model_fn_args(model_fn, params)
+    self._model_fn = model_fn
+    self._params = copy.deepcopy(params or {})
+
+    # pylint: disable=protected-access
+    self._warm_start_settings = _get_default_warm_start_settings(
+        warm_start_from)
+    # pylint: enable=protected-access
+
+  @property
+  def model_dir(self):
+    return self._model_dir
+
+  @property
+  def config(self):
+    return copy.deepcopy(self._config)
+
+  @property
+  def params(self):
+    return copy.deepcopy(self._params)
+
+  @property
+  def model_fn(self):
+    """Returns the `model_fn` which is bound to `self.params`.
+
+    Returns:
+      The `model_fn` with following signature:
+        `def model_fn(features, labels, mode, config)`
+    """
+
+    def public_model_fn(features, labels, mode, config):
+      return self._call_model_fn(features, labels, mode, config)
+
+    return public_model_fn
+
+  # TODO(ispir): support a list of names
+  def get_variable_value(self, name):
+    """Returns value of the variable given by name.
+
+    Args:
+      name: string, name of the tensor.
+
+    Returns:
+      Numpy array - value of the tensor.
+
+    Raises:
+      ValueError: If the `Estimator` has not produced a checkpoint yet.
+    """
+    _check_checkpoint_available(self.model_dir)
+    with context.graph_mode():
+      return tf.train.load_variable(self.model_dir, name)
+
+  def get_variable_names(self):
+    """Returns list of all variable names in this model.
+
+    Returns:
+      List of names.
+
+    Raises:
+      ValueError: If the `Estimator` has not produced a checkpoint yet.
+    """
+    _check_checkpoint_available(self.model_dir)
+    with context.graph_mode():
+      return [name for name, _ in tf.train.list_variables(self.model_dir)]
+
+  def latest_checkpoint(self):
+    """Finds the filename of the latest saved checkpoint file in `model_dir`.
+
+    Returns:
+      The full path to the latest checkpoint or `None` if no checkpoint was
+      found.
+    """
+    with context.graph_mode():
+      return checkpoint_management.latest_checkpoint(self.model_dir)
+
+  def train(self,
+            input_fn,
+            hooks=None,
+            steps=None,
+            max_steps=None,
+            saving_listeners=None):
+    """Trains a model given training data `input_fn`.
+
+    Args:
+      input_fn: A function that provides input data for training as minibatches.
+        See [Premade Estimators](
+        https://tensorflow.org/guide/premade_estimators#create_input_functions)
+        for more information. The function should construct and return one of
+        the following:
+          * A `tf.data.Dataset` object: Outputs of `Dataset` object must be a
+            tuple `(features, labels)` with same constraints as below.
+          * A tuple `(features, labels)`: Where `features` is a `tf.Tensor` or a
+            dictionary of string feature name to `Tensor` and `labels` is a
+            `Tensor` or a dictionary of string label name to `Tensor`. Both
+            `features` and `labels` are consumed by `model_fn`. They should
+            satisfy the expectation of `model_fn` from inputs.
+      hooks: List of `tf.train.SessionRunHook` subclass instances. Used for
+        callbacks inside the training loop.
+      steps: Number of steps for which to train the model. If `None`, train
+        forever or train until `input_fn` generates the `tf.errors.OutOfRange`
+        error or `StopIteration` exception. `steps` works incrementally. If you
+        call `train(steps=10)` twice, training occurs for 20 steps in total.
+        If `OutOfRange` or `StopIteration` occurs in the middle, training stops
+        before 20 steps. If you don't want to have incremental behavior please
+        set `max_steps` instead. If set, `max_steps` must be `None`.
+      max_steps: Number of total steps for which to train model. If `None`,
+        train forever or train until `input_fn` generates the
+        `tf.errors.OutOfRange` error or `StopIteration` exception. If set,
+        `steps` must be `None`. If `OutOfRange` or `StopIteration` occurs in the
+        middle, training stops before `max_steps` steps. Two calls to
+        `train(steps=100)` means 200 training iterations. On the other hand, two
+        calls to `train(max_steps=100)` means that the second call will not do
+        any iteration since first call did all 100 steps.
+      saving_listeners: list of `CheckpointSaverListener` objects. Used for
+        callbacks that run immediately before or after checkpoint savings.
+
+    Returns:
+      `self`, for chaining.
+
+    Raises:
+      ValueError: If both `steps` and `max_steps` are not `None`.
+      ValueError: If either `steps` or `max_steps` is `<= 0`.
+    """
+    _estimator_api_gauge.get_cell('train').set(True)
+    if self.config.task_type in (run_config.TaskType.EVALUATOR,
+                                 run_config.TaskType.PS):
+      raise ValueError(
+          'Train has been called with the wrong configuration. Please use '
+          'tf.estimator.train_and_evaluate, which calls the proper API '
+          'according to the given configuration. Current configuration: '
+          '{}.'.format(self.config))
+
+    with context.graph_mode():
+      if (steps is not None) and (max_steps is not None):
+        raise ValueError('Can not provide both steps and max_steps.')
+      if steps is not None and steps <= 0:
+        raise ValueError('Must specify steps > 0, given: {}'.format(steps))
+      if max_steps is not None and max_steps <= 0:
+        raise ValueError(
+            'Must specify max_steps > 0, given: {}'.format(max_steps))
+
+      if max_steps is not None:
+        start_step = _load_global_step_from_checkpoint_dir(self._model_dir)
+        if max_steps <= start_step:
+          logging.info('Skipping training since max_steps has already been '
+                       'reached.')
+          return self
+
+      hooks = _check_hooks_type(hooks)
+      hooks.extend(self._convert_train_steps_to_hooks(steps, max_steps))
+
+      saving_listeners = _check_listeners_type(saving_listeners)
+      loss = self._train_model(input_fn, hooks, saving_listeners)
+      logging.info('Loss for final step: %s.', loss)
+      return self
+
+  def _convert_train_steps_to_hooks(self, steps, max_steps):
+    """Create hooks to run correct number of steps in training.
+
+    Args:
+      steps: number of steps to run during training.
+      max_steps: maximum number of steps to be run during training. It'll be the
+        maximum number of steps the model will train to after restoring from
+        checkpoint even across multiple estimator.train calls.
+
+    Returns:
+      List of hooks to be passed to the estimator.
+    """
+    if steps is not None or max_steps is not None:
+      if self._train_distribution:
+        steps_per_run = getattr(self._train_distribution.extended,
+                                'steps_per_run', 1)
+        if steps_per_run > 1:
+          return [
+              basic_session_run_hooks._MultiStepStopAtStepHook(  # pylint: disable=protected-access
+                  steps, max_steps, steps_per_run)
+          ]
+      return [tf.compat.v1.train.StopAtStepHook(steps, max_steps)]
+    else:
+      return []
+
+  def eval_dir(self, name=None):
+    """Shows the directory name where evaluation metrics are dumped.
+
+    Args:
+      name: Name of the evaluation if user needs to run multiple evaluations on
+        different data sets, such as on training data vs test data. Metrics for
+        different evaluations are saved in separate folders, and appear
+        separately in tensorboard.
+
+    Returns:
+      A string which is the path of the directory containing the evaluation
+      metrics.
+    """
+    return os.path.join(self._model_dir, 'eval' if not name else 'eval_' + name)
+
+  def evaluate(self,
+               input_fn,
+               steps=None,
+               hooks=None,
+               checkpoint_path=None,
+               name=None):
+    """Evaluates the model given evaluation data `input_fn`.
+
+    For each step, calls `input_fn`, which returns one batch of data.
+    Evaluates until:
+    - `steps` batches are processed, or
+    - `input_fn` raises an end-of-input exception (`tf.errors.OutOfRangeError`
+    or `StopIteration`).
+
+    Args:
+      input_fn: A function that constructs the input data for evaluation. See
+        [Premade Estimators](
+        https://tensorflow.org/guide/premade_estimators#create_input_functions)
+        for more information. The function should construct and return one of
+        the following:
+        * A `tf.data.Dataset` object: Outputs of `Dataset` object must be a
+          tuple `(features, labels)` with same constraints as below.
+        * A tuple `(features, labels)`: Where `features` is a `tf.Tensor` or a
+          dictionary of string feature name to `Tensor` and `labels` is a
+          `Tensor` or a dictionary of string label name to `Tensor`. Both
+          `features` and `labels` are consumed by `model_fn`. They should
+          satisfy the expectation of `model_fn` from inputs.
+      steps: Number of steps for which to evaluate model. If `None`, evaluates
+        until `input_fn` raises an end-of-input exception.
+      hooks: List of `tf.train.SessionRunHook` subclass instances. Used for
+        callbacks inside the evaluation call.
+      checkpoint_path: Path of a specific checkpoint to evaluate. If `None`, the
+        latest checkpoint in `model_dir` is used.  If there are no checkpoints
+        in `model_dir`, evaluation is run with newly initialized `Variables`
+        instead of ones restored from checkpoint.
+      name: Name of the evaluation if user needs to run multiple evaluations on
+        different data sets, such as on training data vs test data. Metrics for
+        different evaluations are saved in separate folders, and appear
+        separately in tensorboard.
+
+    Returns:
+      A dict containing the evaluation metrics specified in `model_fn` keyed by
+      name, as well as an entry `global_step` which contains the value of the
+      global step for which this evaluation was performed. For canned
+      estimators, the dict contains the `loss` (mean loss per mini-batch) and
+      the `average_loss` (mean loss per sample). Canned classifiers also return
+      the `accuracy`. Canned regressors also return the `label/mean` and the
+      `prediction/mean`.
+
+    Raises:
+      ValueError: If `steps <= 0`.
+    """
+    _estimator_api_gauge.get_cell('evaluate').set(True)
+    # pylint: disable=protected-access
+    if (self._eval_distribution and
+        hasattr(self._config, '_distribute_coordinator_mode') and
+        self._config._distribute_coordinator_mode):
+      return distribute_coordinator_training.estimator_evaluate(
+          self,
+          lambda est, s, eval_hooks: est._actual_eval(  # pylint: disable=g-long-lambda
+              input_fn,
+              strategy=s,
+              steps=steps,
+              hooks=eval_hooks,
+              checkpoint_path=checkpoint_path,
+              name=name),
+          hooks)
+    # pylint: enable=protected-access
+    else:
+      return self._actual_eval(
+          input_fn,
+          strategy=self._eval_distribution,
+          steps=steps,
+          hooks=hooks,
+          checkpoint_path=checkpoint_path,
+          name=name)
+
+  def _actual_eval(self,
+                   input_fn,
+                   strategy=None,
+                   steps=None,
+                   hooks=None,
+                   checkpoint_path=None,
+                   name=None):
+    """The method that does evaluation actually."""
+    with context.graph_mode():
+      hooks = _check_hooks_type(hooks)
+      hooks.extend(self._convert_eval_steps_to_hooks(steps))
+
+      # Check that model has been trained (if nothing has been set explicitly).
+      if not checkpoint_path:
+        latest_path = checkpoint_management.latest_checkpoint(self._model_dir)
+        if not latest_path:
+          tf.compat.v1.logging.info(
+              'Could not find trained model in model_dir: {}, running '
+              'initialization to evaluate.'.format(self._model_dir))
+        checkpoint_path = latest_path
+
+      def _evaluate():
+        (scaffold, update_op, eval_dict, all_hooks) = (
+            self._evaluate_build_graph(input_fn, hooks, checkpoint_path))
+        return self._evaluate_run(
+            checkpoint_path=checkpoint_path,
+            scaffold=scaffold,
+            update_op=update_op,
+            eval_dict=eval_dict,
+            all_hooks=all_hooks,
+            output_dir=self.eval_dir(name))
+
+      with tf.Graph().as_default():
+        if strategy:
+          # We want to create the iterations variable outside the distribution
+          # scope as that is just stored on the host and mainly used to drive
+          # the loop and doesn't need to be a Mirrored/Device variable.
+          training.get_or_create_steps_per_run_variable()
+          with strategy.scope():
+            return _evaluate()
+        else:
+          return _evaluate()
+
+  def _convert_eval_steps_to_hooks(self, steps):
+    """Create hooks to run correct number of steps in evaluation.
+
+    Args:
+      steps: number of steps to run during evaluation.
+
+    Raises:
+      ValueError: if steps is less than or equal to zero.
+
+    Returns:
+      List of hooks to be passed to the estimator.
+    """
+    if steps is None:
+      return []
+
+    if steps <= 0:
+      raise ValueError('Must specify steps > 0, given: {}'.format(steps))
+
+    # The hooks are declared as private in evaluation.py to discourage their
+    # use by other libraries or open source users. This should be the only
+    # usage of the estimator evaluation hooks.
+    if self._eval_distribution:
+      steps_per_run = getattr(self._eval_distribution.extended, 'steps_per_run',
+                              1)
+      if steps_per_run > 1:
+        return [
+            evaluation._MultiStepStopAfterNEvalsHook(  # pylint: disable=protected-access
+                num_evals=steps,
+                steps_per_run=steps_per_run)
+        ]
+    return [evaluation._StopAfterNEvalsHook(num_evals=steps)]  # pylint: disable=protected-access
+
+  def predict(self,
+              input_fn,
+              predict_keys=None,
+              hooks=None,
+              checkpoint_path=None,
+              yield_single_examples=True):
+    """Yields predictions for given features.
+
+    Please note that interleaving two predict outputs does not work. See:
+    [issue/20506](
+    https://github.com/tensorflow/tensorflow/issues/20506#issuecomment-422208517)
+
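+    A minimal usage sketch (with a hypothetical `predict_input_fn`; the keys
+    in each yielded prediction depend on what `model_fn` puts in
+    `predictions`):
+
+    ```python
+    for prediction in estimator.predict(input_fn=predict_input_fn):
+      print(prediction)
+    ```
+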
+    Args:
+      input_fn: A function that constructs the features. Prediction continues
+        until `input_fn` raises an end-of-input exception
+        (`tf.errors.OutOfRangeError` or `StopIteration`). See [Premade
+        Estimators](
+        https://tensorflow.org/guide/premade_estimators#create_input_functions)
+        for more information. The function should construct and return one of
+        the following:
+        * `tf.data.Dataset` object -- Outputs of `Dataset` object must have
+          same constraints as below.
+        * features -- A `tf.Tensor` or a dictionary of string feature name to
+          `Tensor`. features are consumed by `model_fn`. They should satisfy
+          the expectation of `model_fn` from inputs.
+        * A tuple, in which case the first item is extracted as features.
+      predict_keys: list of `str`, name of the keys to predict. It is used if
+        the `tf.estimator.EstimatorSpec.predictions` is a `dict`. If
+        `predict_keys` is used then rest of the predictions will be filtered
+        from the dictionary. If `None`, returns all.
+      hooks: List of `tf.train.SessionRunHook` subclass instances. Used for
+        callbacks inside the prediction call.
+      checkpoint_path: Path of a specific checkpoint to predict. If `None`, the
+        latest checkpoint in `model_dir` is used.  If there are no checkpoints
+        in `model_dir`, prediction is run with newly initialized `Variables`
+        instead of ones restored from checkpoint.
+      yield_single_examples: If `False`, yields the whole batch as returned by
+        the `model_fn` instead of decomposing the batch into individual
+        elements. This is useful if `model_fn` returns some tensors whose first
+        dimension is not equal to the batch size.
+
+    Yields:
+      Evaluated values of `predictions` tensors.
+
+    Raises:
+      ValueError: If batch length of predictions is not the same and
+        `yield_single_examples` is `True`.
+      ValueError: If there is a conflict between `predict_keys` and
+        `predictions`. For example if `predict_keys` is not `None` but
+        `tf.estimator.EstimatorSpec.predictions` is not a `dict`.
+    """
+    _estimator_api_gauge.get_cell('predict').set(True)
+    with context.graph_mode():
+      hooks = _check_hooks_type(hooks)
+      # Check that model has been trained.
+      if not checkpoint_path:
+        checkpoint_path = checkpoint_management.latest_checkpoint(
+            self._model_dir)
+      if not checkpoint_path:
+        tf.compat.v1.logging.info(
+            'Could not find trained model in model_dir: {}, running '
+            'initialization to predict.'.format(self._model_dir))
+      with tf.Graph().as_default() as g:
+        tf.compat.v1.random.set_random_seed(self._config.tf_random_seed)
+        self._create_and_assert_global_step(g)
+        features, input_hooks = self._get_features_from_input_fn(
+            input_fn, ModeKeys.PREDICT)
+        estimator_spec = self._call_model_fn(features, None, ModeKeys.PREDICT,
+                                             self.config)
+
+        # Call to warm_start has to be after model_fn is called.
+        self._maybe_warm_start(checkpoint_path)
+
+        predictions = self._extract_keys(estimator_spec.predictions,
+                                         predict_keys)
+        all_hooks = list(input_hooks)
+        all_hooks.extend(hooks)
+        all_hooks.extend(list(estimator_spec.prediction_hooks or []))
+        with tf.compat.v1.train.MonitoredSession(
+            session_creator=tf.compat.v1.train.ChiefSessionCreator(
+                checkpoint_filename_with_path=checkpoint_path,
+                master=self._config.master,
+                scaffold=estimator_spec.scaffold,
+                config=self._session_config),
+            hooks=all_hooks) as mon_sess:
+          while not mon_sess.should_stop():
+            preds_evaluated = mon_sess.run(predictions)
+            if not yield_single_examples:
+              yield preds_evaluated
+            elif not isinstance(predictions, dict):
+              for pred in preds_evaluated:
+                yield pred
+            else:
+              for i in range(self._extract_batch_length(preds_evaluated)):
+                yield {
+                    key: value[i]
+                    for key, value in six.iteritems(preds_evaluated)
+                }
+
+  def _assert_members_are_not_overridden(self):
+    """Asserts members of `Estimator` are not overridden."""
+    _assert_members_are_not_overridden(Estimator, self)
+
+  def export_saved_model(self,
+                         export_dir_base,
+                         serving_input_receiver_fn,
+                         assets_extra=None,
+                         as_text=False,
+                         checkpoint_path=None,
+                         experimental_mode=ModeKeys.PREDICT):
+    # pylint: disable=line-too-long
+    """Exports inference graph as a `SavedModel` into the given dir.
+
+    For a detailed guide on SavedModel, see
+    [Using the SavedModel format]
+    (https://tensorflow.org/guide/saved_model#savedmodels_from_estimators).
+
+    This method builds a new graph by first calling the
+    `serving_input_receiver_fn` to obtain feature `Tensor`s, and then calling
+    this `Estimator`'s `model_fn` to generate the model graph based on those
+    features. It restores the given checkpoint (or, lacking that, the most
+    recent checkpoint) into this graph in a fresh session.  Finally it creates
+    a timestamped export directory below the given `export_dir_base`, and writes
+    a `SavedModel` into it containing a single `tf.MetaGraphDef` saved from this
+    session.
+
+    The exported `MetaGraphDef` will provide one `SignatureDef` for each
+    element of the `export_outputs` dict returned from the `model_fn`, named
+    using the same keys.  One of these keys is always
+    `tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY`,
+    indicating which signature will be served when a serving request does not
+    specify one. For each signature, the outputs are provided by the
+    corresponding `tf.estimator.export.ExportOutput`s, and the inputs are always
+    the input receivers provided by the `serving_input_receiver_fn`.
+
+    Extra assets may be written into the `SavedModel` via the `assets_extra`
+    argument.  This should be a dict, where each key gives a destination path
+    (including the filename) relative to the assets.extra directory.  The
+    corresponding value gives the full path of the source file to be copied.
+    For example, the simple case of copying a single file without renaming it
+    is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
+
+    The experimental_mode parameter can be used to export a single
+    train/eval/predict graph as a `SavedModel`.
+    See `experimental_export_all_saved_models` for full docs.
+
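+    A hedged usage sketch (the `serving_input_fn` below is illustrative; the
+    feature name `'x'` and its shape are assumptions, not part of this API):
+
+    ```python
+    def serving_input_fn():
+      inputs = {'x': tf.compat.v1.placeholder(tf.float32, [None, 4])}
+      return tf.estimator.export.ServingInputReceiver(inputs, inputs)
+
+    export_path = estimator.export_saved_model('/tmp/export', serving_input_fn)
+    ```
+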
+    Args:
+      export_dir_base: A string containing a directory in which to create
+        timestamped subdirectories containing exported `SavedModel`s.
+      serving_input_receiver_fn: A function that takes no argument and returns a
+        `tf.estimator.export.ServingInputReceiver` or
+        `tf.estimator.export.TensorServingInputReceiver`.
+      assets_extra: A dict specifying how to populate the assets.extra directory
+        within the exported `SavedModel`, or `None` if no extra assets are
+        needed.
+      as_text: whether to write the `SavedModel` proto in text format.
+      checkpoint_path: The checkpoint path to export.  If `None` (the default),
+        the most recent checkpoint found within the model directory is chosen.
+      experimental_mode: `tf.estimator.ModeKeys` value indicating which mode
+        will be exported. Note that this feature is experimental.
+
+    Returns:
+      The path to the exported directory as a bytes object.
+
+    Raises:
+      ValueError: if no `serving_input_receiver_fn` is provided, no
+        `export_outputs` are provided, or no checkpoint can be found.
+    """
+    # pylint: enable=line-too-long
+    if not serving_input_receiver_fn:
+      raise ValueError('An input_receiver_fn must be defined.')
+
+    input_receiver_fn_map = {experimental_mode: serving_input_receiver_fn}
+
+    return self._export_all_saved_models(
+        export_dir_base,
+        input_receiver_fn_map,
+        assets_extra=assets_extra,
+        as_text=as_text,
+        checkpoint_path=checkpoint_path,
+        strip_default_attrs=True)
+
+  def experimental_export_all_saved_models(self,
+                                           export_dir_base,
+                                           input_receiver_fn_map,
+                                           assets_extra=None,
+                                           as_text=False,
+                                           checkpoint_path=None):
+    """Exports a `SavedModel` with `tf.MetaGraphDefs` for each requested mode.
+
+    For each mode passed in via the `input_receiver_fn_map`,
+    this method builds a new graph by calling the `input_receiver_fn` to obtain
+    feature and label `Tensor`s. Next, this method calls the `Estimator`'s
+    `model_fn` in the passed mode to generate the model graph based on
+    those features and labels, and restores the given checkpoint
+    (or, lacking that, the most recent checkpoint) into the graph.
+    Only one of the modes is used for saving variables to the `SavedModel`
+    (order of preference: `tf.estimator.ModeKeys.TRAIN`,
+    `tf.estimator.ModeKeys.EVAL`, then
+    `tf.estimator.ModeKeys.PREDICT`), such that up to three
+    `tf.MetaGraphDefs` are saved with a single set of variables in a single
+    `SavedModel` directory.
+
+    For the variables and `tf.MetaGraphDefs`, this method creates a timestamped
+    export directory below `export_dir_base` and writes a `SavedModel` into it
+    containing the `tf.MetaGraphDef` for the given mode and its associated
+    signatures.
+
+    For prediction, the exported `MetaGraphDef` will provide one `SignatureDef`
+    for each element of the `export_outputs` dict returned from the `model_fn`,
+    named using the same keys.  One of these keys is always
+    `tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY`,
+    indicating which signature will be served when a serving request does not
+    specify one. For each signature, the outputs are provided by the
+    corresponding `tf.estimator.export.ExportOutput`s, and the inputs are always
+    the input receivers provided by the `serving_input_receiver_fn`.
+
+    For training and evaluation, the `train_op` is stored in an extra
+    collection, and loss, metrics, and predictions are included in a
+    `SignatureDef` for the mode in question.
+
+    Extra assets may be written into the `SavedModel` via the `assets_extra`
+    argument.  This should be a dict, where each key gives a destination path
+    (including the filename) relative to the assets.extra directory.  The
+    corresponding value gives the full path of the source file to be copied.
+    For example, the simple case of copying a single file without renaming it
+    is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
+
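+    A hedged sketch of building the `input_receiver_fn_map` (the
+    `serving_input_fn` name is illustrative and assumed to be defined as in
+    the `export_saved_model` example):
+
+    ```python
+    input_receiver_fn_map = {
+        tf.estimator.ModeKeys.PREDICT: serving_input_fn,
+    }
+    export_path = estimator.experimental_export_all_saved_models(
+        '/tmp/export', input_receiver_fn_map)
+    ```
+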
+    Args:
+      export_dir_base: A string containing a directory in which to create
+        timestamped subdirectories containing exported `SavedModel`s.
+      input_receiver_fn_map: dict of `tf.estimator.ModeKeys` to
+        `input_receiver_fn` mappings, where the `input_receiver_fn` is a
+        function that takes no arguments and returns the appropriate subclass of
+        `InputReceiver`.
+      assets_extra: A dict specifying how to populate the assets.extra directory
+        within the exported `SavedModel`, or `None` if no extra assets are
+        needed.
+      as_text: whether to write the `SavedModel` proto in text format.
+      checkpoint_path: The checkpoint path to export.  If `None` (the default),
+        the most recent checkpoint found within the model directory is chosen.
+
+    Returns:
+      The path to the exported directory as a bytes object.
+
+    Raises:
+      ValueError: if any `input_receiver_fn` is `None`, no `export_outputs`
+        are provided, or no checkpoint can be found.
+    """
+    return self._export_all_saved_models(
+        export_dir_base,
+        input_receiver_fn_map,
+        assets_extra=assets_extra,
+        as_text=as_text,
+        checkpoint_path=checkpoint_path,
+        strip_default_attrs=True)
+
+  def _export_all_saved_models(self,
+                               export_dir_base,
+                               input_receiver_fn_map,
+                               assets_extra=None,
+                               as_text=False,
+                               checkpoint_path=None,
+                               strip_default_attrs=True):
+    """Exports multiple modes in the model function to a SavedModel."""
+    # TODO(b/65561022): Consider allowing multiple input_receiver_fns per mode.
+    with context.graph_mode():
+      if not checkpoint_path:
+        # Locate the latest checkpoint
+        checkpoint_path = self.latest_checkpoint()
+      if not checkpoint_path:
+        if self._warm_start_settings:
+          checkpoint_path = self._warm_start_settings.ckpt_to_initialize_from
+          if tf.compat.v1.gfile.IsDirectory(checkpoint_path):
+            checkpoint_path = tf.train.latest_checkpoint(checkpoint_path)
+        else:
+          raise ValueError("Couldn't find trained model at {}.".format(
+              self._model_dir))
+
+      export_dir = export_lib.get_timestamped_export_dir(export_dir_base)
+      temp_export_dir = export_lib.get_temp_export_dir(export_dir)
+
+      builder = tf.compat.v1.saved_model.Builder(temp_export_dir)
+
+      save_variables = True
+      # Note that the order in which we run here matters, as the first
+      # mode we pass through will be used to save the variables. We run TRAIN
+      # first, as that is also the mode used for checkpoints, and therefore
+      # we are not likely to have vars in PREDICT that are not in the checkpoint
+      # created by TRAIN.
+      if input_receiver_fn_map.get(ModeKeys.TRAIN):
+        self._add_meta_graph_for_mode(
+            builder,
+            input_receiver_fn_map,
+            checkpoint_path,
+            save_variables,
+            mode=ModeKeys.TRAIN,
+            strip_default_attrs=strip_default_attrs)
+        save_variables = False
+      if input_receiver_fn_map.get(ModeKeys.EVAL):
+        self._add_meta_graph_for_mode(
+            builder,
+            input_receiver_fn_map,
+            checkpoint_path,
+            save_variables,
+            mode=ModeKeys.EVAL,
+            strip_default_attrs=strip_default_attrs)
+        save_variables = False
+      if input_receiver_fn_map.get(ModeKeys.PREDICT):
+        self._add_meta_graph_for_mode(
+            builder,
+            input_receiver_fn_map,
+            checkpoint_path,
+            save_variables,
+            mode=ModeKeys.PREDICT,
+            strip_default_attrs=strip_default_attrs)
+        save_variables = False
+
+      if save_variables:
+        raise ValueError('No valid modes for exporting found. Got {}.'.format(
+            input_receiver_fn_map.keys()))
+
+      builder.save(as_text)
+
+      # Add the extra assets
+      if assets_extra:
+        assets_extra_path = os.path.join(
+            tf.compat.as_bytes(temp_export_dir),
+            tf.compat.as_bytes('assets.extra'))
+        for dest_relative, source in assets_extra.items():
+          dest_absolute = os.path.join(
+              tf.compat.as_bytes(assets_extra_path),
+              tf.compat.as_bytes(dest_relative))
+          dest_path = os.path.dirname(dest_absolute)
+          tf.compat.v1.gfile.MakeDirs(dest_path)
+          tf.compat.v1.gfile.Copy(source, dest_absolute)
+
+      tf.compat.v1.gfile.Rename(temp_export_dir, export_dir)
+      return export_dir
+
+  def _add_meta_graph_for_mode(self,
+                               builder,
+                               input_receiver_fn_map,
+                               checkpoint_path,
+                               save_variables=True,
+                               mode=ModeKeys.PREDICT,
+                               export_tags=None,
+                               check_variables=True,
+                               strip_default_attrs=True):
+    """Loads variables and adds them along with a `tf.MetaGraphDef` for saving.
+
+    Args:
+      builder: instance of `tf.saved_model.builder.SavedModelBuilder` that will
+        be used for saving.
+      input_receiver_fn_map: dict of `tf.estimator.ModeKeys` to
+        `input_receiver_fn` mappings, where the `input_receiver_fn` is a
+        function that takes no argument and returns the appropriate subclass of
+        `InputReceiver`.
+      checkpoint_path: The checkpoint path to export.
+      save_variables: bool, whether variables should be saved. If `False`, just
+        the `tf.MetaGraphDef` will be saved. Note that `save_variables` should
+        only be `True` for the first call to this function, and the
+        `SavedModelBuilder` will raise an error if that is not the case.
+      mode: `tf.estimator.ModeKeys` value indicating which mode will be
+        exported.
+      export_tags: The set of tags with which to save `tf.MetaGraphDef`. If
+        `None`, a default set will be selected to match the passed mode.
+      check_variables: bool, whether to check the checkpoint has all variables.
+      strip_default_attrs: bool, whether to strip default attributes. This may
+        only be True when called from the deprecated V1
+        Estimator.export_savedmodel.
+
+    Raises:
+      ValueError: if `save_variables` is `True` and `check_variables` is
+        `False`.
+    """
+    if export_tags is None:
+      export_tags = export_lib.EXPORT_TAG_MAP[mode]
+    input_receiver_fn = input_receiver_fn_map[mode]
+
+    with tf.Graph().as_default() as g:
+      self._create_and_assert_global_step(g)
+      tf.compat.v1.random.set_random_seed(self._config.tf_random_seed)
+
+      input_receiver = input_receiver_fn()
+
+      # Call the model_fn and collect the export_outputs.
+      estimator_spec = self._call_model_fn(
+          features=input_receiver.features,
+          labels=getattr(input_receiver, 'labels', None),
+          mode=mode,
+          config=self.config)
+
+      export_outputs = export_lib.export_outputs_for_mode(
+          mode=estimator_spec.mode,
+          serving_export_outputs=estimator_spec.export_outputs,
+          predictions=estimator_spec.predictions,
+          loss=estimator_spec.loss,
+          metrics=estimator_spec.eval_metric_ops)
+
+      # Build the SignatureDefs from receivers and all outputs
+      signature_def_map = export_lib.build_all_signature_defs(
+          input_receiver.receiver_tensors,
+          export_outputs,
+          getattr(input_receiver, 'receiver_tensors_alternatives', None),
+          serving_only=(mode == ModeKeys.PREDICT))
+
+      with tf.compat.v1.Session(config=self._session_config) as session:
+
+        if estimator_spec.scaffold.local_init_op is not None:
+          local_init_op = estimator_spec.scaffold.local_init_op
+        else:
+          local_init_op = tf.compat.v1.train.Scaffold.default_local_init_op()
+
+        # This saver will be used both for restoring variables now,
+        # and in saving out the metagraph below. This ensures that any
+        # Custom Savers stored with the Scaffold are passed through to the
+        # SavedModel for restore later.
+        if isinstance(estimator_spec.scaffold.saver, trackable_util.Checkpoint):
+          graph_saver = tf.compat.v1.train.Saver(
+              var_list=graph_view.ObjectGraphView(
+                  estimator_spec.scaffold.saver).frozen_saveable_objects(),
+              sharded=True)
+        else:
+          graph_saver = (
+              estimator_spec.scaffold.saver or
+              tf.compat.v1.train.Saver(sharded=True))
+
+        if save_variables and not check_variables:
+          raise ValueError('If `save_variables` is `True`, `check_variables` '
+                           'must not be `False`.')
+        if check_variables:
+          try:
+            graph_saver.restore(session, checkpoint_path)
+          except tf.errors.NotFoundError as e:
+            msg = ('Could not load all requested variables from checkpoint. '
+                   'Please make sure your model_fn does not expect variables '
+                   'that were not saved in the checkpoint.\n\n'
+                   'Encountered error with mode `{}` while restoring '
+                   'checkpoint from: `{}`. Full Traceback:\n\n{}').format(
+                       mode, checkpoint_path, e)
+            raise ValueError(msg)
+
+        # We add the train op explicitly for now, so that we don't have to
+        # change the Builder public interface. Note that this is a no-op
+        # for prediction, where train_op is None.
+        builder._add_train_op(estimator_spec.train_op)  # pylint: disable=protected-access
+
+        meta_graph_kwargs = dict(
+            tags=export_tags,
+            signature_def_map=signature_def_map,
+            assets_collection=tf.compat.v1.get_collection(
+                tf.compat.v1.GraphKeys.ASSET_FILEPATHS),
+            main_op=local_init_op,
+            saver=graph_saver,
+            strip_default_attrs=strip_default_attrs)
+
+        if save_variables:
+          builder.add_meta_graph_and_variables(session, **meta_graph_kwargs)
+        else:
+          builder.add_meta_graph(**meta_graph_kwargs)
+
+  def _get_features_from_input_fn(self, input_fn, mode):
+    """Extracts the `features` from return values of `input_fn`."""
+    result = self._call_input_fn(input_fn, mode)
+    result, _, hooks = estimator_util.parse_input_fn_result(result)
+    self._validate_features_in_predict_input(result)
+    return result, hooks
+
+  def _validate_features_in_predict_input(self, result):
+    if not _has_dataset_or_queue_runner(result):
+      logging.warning('Input graph does not use tf.data.Dataset or contain a '
+                      'QueueRunner. That means predict yields forever. '
+                      'This is probably a mistake.')
+
+  def _get_iterator_from_input_fn(self, input_fn, mode, distribution=None):
+    """Calls `input_fn` and returns an iterator."""
+    if distribution is not None:
+      # pylint: disable=g-long-lambda
+      iterator = distribution.make_input_fn_iterator(
+          lambda input_context: self._call_input_fn(input_fn, mode,
+                                                    input_context))
+      input_hooks = [
+          estimator_util.DistributedIteratorInitializerHook(iterator)
+      ]
+    else:
+      result = self._call_input_fn(input_fn, mode)
+      iterator = result.make_initializable_iterator()
+      input_hooks = [estimator_util._DatasetInitializerHook(iterator)]  # pylint: disable=protected-access
+    return iterator, input_hooks
+
+  def _get_features_and_labels_from_input_fn(self, input_fn, mode):
+    """Extracts the `features` and labels from return values of `input_fn`."""
+    return estimator_util.parse_input_fn_result(
+        self._call_input_fn(input_fn, mode))
+
+  def _extract_batch_length(self, preds_evaluated):
+    """Extracts batch length of predictions."""
+    batch_length = None
+    for key, value in six.iteritems(preds_evaluated):
+      batch_length = batch_length or value.shape[0]
+      if value.shape[0] != batch_length:
+        raise ValueError('Batch length of predictions should be the same. '
+                         '%s has a different batch length than the others.' %
+                         key)
+    return batch_length
+
+  def _extract_keys(self, predictions, predict_keys):
+    """Extracts `predict_keys` from `predictions`."""
+    if not predict_keys:
+      return predictions
+    if not isinstance(predictions, dict):
+      raise ValueError(
+          'predict_keys argument is not valid in case of non-dict predictions.')
+    existing_keys = predictions.keys()
+    predictions = {
+        key: value
+        for key, value in six.iteritems(predictions)
+        if key in predict_keys
+    }
+    if not predictions:
+      raise ValueError('Expected to run at least one output from %s, '
+                       'provided %s.' % (existing_keys, predict_keys))
+    return predictions
+
+  def _create_global_step(self, graph):
+    """Creates the global step tensor in graph.
+
+    The global step tensor must be an integer type with name 'global_step' and
+    be added to the collection `tf.GraphKeys.GLOBAL_STEP`.
+
+    Args:
+      graph: The graph in which to create the global step tensor.
+
+    Returns:
+      The global step `tf.Tensor`.
+    """
+    return tf.compat.v1.train.create_global_step(graph)
+
+  def _create_and_assert_global_step(self, graph):
+    """Creates and asserts properties of the global step.
+
+    Args:
+      graph: The graph in which to create the global step tensor.
+
+    Returns:
+      The global step `tf.Tensor`.
+    """
+    step = self._create_global_step(graph)
+    assert step is tf.compat.v1.train.get_global_step()
+    assert step.dtype.is_integer
+    return step
+
+  def _call_input_fn(self, input_fn, mode, input_context=None):
+    """Calls the input function.
+
+    Args:
+      input_fn: The input function.
+      mode: `tf.estimator.ModeKeys`
+      input_context: Optional `tf.distribute.InputContext`, passed through to
+        `input_fn` if `input_fn` declares an `input_context` argument.
+
+    Returns:
+      The return value of the passed `input_fn`, which should be one of:
+
+        * A `tf.data.Dataset` object: Outputs of `Dataset` object must be a
+          tuple `(features, labels)` with same constraints as below.
+        * A tuple `(features, labels)`: Where `features` is a `Tensor` or a
+          dictionary of string feature name to `Tensor` and `labels` is a
+          `Tensor` or a dictionary of string label name to `Tensor`. Both
+          `features` and `labels` are consumed by `model_fn`. They should
+          satisfy the expectation of `model_fn` from inputs.
+
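+    As an illustrative sketch of an `input_fn` compatible with this call (the
+    function name, dataset contents, and batch size are assumptions; only the
+    arguments it declares are actually passed):
+
+    ```python
+    def my_input_fn(mode, params, config):
+      dataset = tf.data.Dataset.from_tensor_slices(
+          ({'x': [[1.0], [2.0]]}, [0.0, 1.0]))
+      return dataset.batch(2)
+    ```
+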
+    Raises:
+      ValueError: if `input_fn` takes invalid arguments.
+    """
+    input_fn_args = function_utils.fn_args(input_fn)
+    kwargs = {}
+    if 'mode' in input_fn_args:
+      kwargs['mode'] = mode
+    if 'params' in input_fn_args:
+      kwargs['params'] = self.params
+    if 'config' in input_fn_args:
+      kwargs['config'] = self.config
+    if input_context and 'input_context' in input_fn_args:
+      tf.compat.v1.logging.info(
+          'The `input_fn` accepts an `input_context` which will '
+          'be given by DistributionStrategy')
+      kwargs['input_context'] = input_context
+    with tf.compat.v1.device('/cpu:0'):
+      return input_fn(**kwargs)
+
+  def _call_model_fn(self, features, labels, mode, config):
+    """Calls model function.
+
+    Args:
+      features: features dict.
+      labels: labels dict.
+      mode: `tf.estimator.ModeKeys`
+      config: `tf.estimator.RunConfig`
+
+    Returns:
+      A `tf.estimator.EstimatorSpec` object.
+
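+    An illustrative sketch of a compatible `model_fn` (TRAIN mode only; the
+    loss, optimizer, and float-valued features and labels are assumptions, not
+    requirements of this API):
+
+    ```python
+    def my_model_fn(features, labels, mode, params, config):
+      # Optional args (`params`, `config`) are passed only if declared.
+      labels = tf.cast(labels, tf.float32)
+      loss = tf.reduce_mean(tf.square(features['x'] - labels))
+      train_op = tf.compat.v1.train.GradientDescentOptimizer(0.1).minimize(
+          loss, global_step=tf.compat.v1.train.get_global_step())
+      return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)
+    ```
+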
+    Raises:
+      ValueError: if `model_fn` returns invalid objects.
+    """
+    model_fn_args = function_utils.fn_args(self._model_fn)
+    kwargs = {}
+    if 'labels' in model_fn_args:
+      kwargs['labels'] = labels
+    else:
+      if labels is not None:
+        raise ValueError(
+            'model_fn does not take labels, but input_fn returns labels.')
+    if 'mode' in model_fn_args:
+      kwargs['mode'] = mode
+    if 'params' in model_fn_args:
+      kwargs['params'] = self.params
+    if 'config' in model_fn_args:
+      kwargs['config'] = config
+
+    logging.info('Calling model_fn.')
+    model_fn_results = self._model_fn(features=features, **kwargs)
+    logging.info('Done calling model_fn.')
+
+    if not isinstance(model_fn_results, model_fn_lib.EstimatorSpec):
+      raise ValueError('model_fn should return an EstimatorSpec.')
+
+    return model_fn_results
+
+  def _train_model(self, input_fn, hooks, saving_listeners):
+    if self._train_distribution:
+      return self._train_model_distributed(input_fn, hooks, saving_listeners)
+    else:
+      return self._train_model_default(input_fn, hooks, saving_listeners)
+
+  def _train_model_default(self, input_fn, hooks, saving_listeners):
+    """Initiate training with `input_fn`, without `DistributionStrategies`.
+
+    Args:
+      input_fn: A function that provides input data for training as minibatches.
+      hooks: List of `tf.train.SessionRunHook` subclass instances. Used for
+        callbacks inside the training loop.
+      saving_listeners: list of `tf.train.CheckpointSaverListener` objects. Used
+        for callbacks that run immediately before or after checkpoint savings.
+
+    Returns:
+      Loss from training
+    """
+    worker_hooks = []
+    with tf.Graph().as_default() as g, g.device(self._device_fn):
+      tf.compat.v1.random.set_random_seed(self._config.tf_random_seed)
+      global_step_tensor = self._create_and_assert_global_step(g)
+
+      # Skip creating a read variable if _create_and_assert_global_step
+      # returns None (e.g. tf.contrib.estimator.SavedModelEstimator).
+      if global_step_tensor is not None:
+        training_util._get_or_create_global_step_read(g)  # pylint: disable=protected-access
+
+      features, labels, input_hooks = (
+          self._get_features_and_labels_from_input_fn(input_fn, ModeKeys.TRAIN))
+      worker_hooks.extend(input_hooks)
+      estimator_spec = self._call_model_fn(features, labels, ModeKeys.TRAIN,
+                                           self.config)
+      global_step_tensor = tf.compat.v1.train.get_global_step(g)
+      return self._train_with_estimator_spec(estimator_spec, worker_hooks,
+                                             hooks, global_step_tensor,
+                                             saving_listeners)
+
+  def _train_model_distributed(self, input_fn, hooks, saving_listeners):
+    """Initiate training with `input_fn`, using `DistributionStrategies`.
+
+    Args:
+      input_fn: A function that provides input data for training as minibatches.
+      hooks: List of `tf.train.SessionRunHook` subclass instances. Used for
+        callbacks inside the training loop.
+      saving_listeners: list of `tf.train.CheckpointSaverListener` objects. Used
+        for callbacks that run immediately before or after checkpoint savings.
+
+    Returns:
+      Loss from training
+    """
+    # pylint: disable=protected-access
+    if (hasattr(self._config, '_distribute_coordinator_mode') and
+        self._config._distribute_coordinator_mode):  # pylint: disable=protected-access
+      distribute_coordinator_training.estimator_train(
+          self,
+          lambda est, s, train_hooks: est._actual_train_model_distributed(  # pylint: disable=g-long-lambda
+              s, input_fn, train_hooks, saving_listeners),
+          hooks)
+      return self
+    else:
+      self._config._train_distribute.configure(self._config.session_config)
+      return self._actual_train_model_distributed(
+          self._config._train_distribute, input_fn, hooks, saving_listeners)
+    # pylint: enable=protected-access
+
+  def _actual_train_model_distributed(self, strategy, input_fn, hooks,
+                                      saving_listeners):
+    """That method that does actual training with distribution strategy."""
+    # TODO(sourabhbajaj): Remove this hack once we migrate the other strategies
+    # to use the new API
+    is_tpu_strategy = strategy.__class__.__name__.startswith('TPUStrategy')
+
+    worker_hooks = []
+    with tf.Graph().as_default() as g:
+      # We want to create the iterations variable outside the distribution scope
+      # as that is just stored on the host and mainly used to drive the loop
+      # and doesn't need to be a Mirrored/Device variable.
+      if is_tpu_strategy:
+        steps_per_run_variable = training.get_or_create_steps_per_run_variable()
+
+      # Set flag on the distribution strategy so that optimizer v1 is
+      # distribution aware and scales the losses by number of replicas.
+      # This is required only for backward compatibility with estimator and
+      # V1 optimizer. TF2 will not do this scaling.
+      if hasattr(strategy, '_scale_loss_for_estimator_enabled'):
+        scale_ctx = strategy._scale_loss_for_estimator_enabled()  # pylint: disable=protected-access
+      else:
+        # TODO(psv): Remove this clause after estimator repo gets the
+        # distribute library changes related to loss scaling.
+        @tf_contextlib.contextmanager
+        def nullcontextmanager():
+          yield
+
+        scale_ctx = nullcontextmanager()
+
+      with strategy.scope(), scale_ctx:
+        tf.compat.v1.random.set_random_seed(self._config.tf_random_seed)
+        iterator, input_hooks = self._get_iterator_from_input_fn(
+            input_fn, ModeKeys.TRAIN, strategy)
+        worker_hooks.extend(input_hooks)
+        global_step_tensor = self._create_and_assert_global_step(g)
+        # we want to add to the global collection in the main thread not the
+        # replica threads.
+        tf.compat.v1.add_to_collection(
+            training_util.GLOBAL_STEP_READ_KEY,
+            strategy.extended.read_var(global_step_tensor))
+
+        if is_tpu_strategy:
+          # Create a step_fn from the train_op of grouped_estimator_spec
+          def step_fn(ctx, inputs):
+            """A single step that is passed to run_on_dataset."""
+            if isinstance(inputs, tuple):
+              features, labels = inputs
+            else:
+              features = inputs
+              labels = None
+            estimator_spec = strategy.extended.call_for_each_replica(
+                self._call_model_fn,
+                args=(features, labels, ModeKeys.TRAIN, self.config))
+            ctx.set_last_step_output(
+                name='loss',
+                output=estimator_spec.loss,
+                reduce_op=_get_loss_reduce_op_for_reporting())
+            ctx.set_non_tensor_output(
+                name='estimator_spec', output=estimator_spec)
+            return estimator_spec.train_op
+
+          # Create new train_op post graph rewrites
+          initial_training_loss = tf.constant(1e7)
+          ctx = strategy.extended.experimental_run_steps_on_iterator(
+              step_fn,
+              iterator,
+              iterations=steps_per_run_variable,
+              initial_loop_values={'loss': initial_training_loss})
+          distributed_train_op = ctx.run_op
+          loss = ctx.last_step_outputs['loss']
+          grouped_estimator_spec = ctx.non_tensor_outputs['estimator_spec']
+        else:
+          features, labels = estimator_util.parse_iterator_result(
+              iterator.get_next())
+          grouped_estimator_spec = strategy.extended.call_for_each_replica(
+              self._call_model_fn,
+              args=(
+                  features,
+                  labels,  # note: labels may be None depending on input_fn
+                  ModeKeys.TRAIN,
+                  self.config))
+          loss = strategy.reduce(
+              _get_loss_reduce_op_for_reporting(),
+              grouped_estimator_spec.loss,
+              axis=None)
+          distributed_train_op = grouped_estimator_spec.train_op
+
+        scaffold = _combine_distributed_scaffold(
+            grouped_estimator_spec.scaffold, strategy)
+
+        # TODO(yuefengz): add a test for unwrapping per_device_hooks.
+        def get_hooks_from_the_first_device(per_device_hooks):
+          return [
+              self._train_distribution.experimental_local_results(
+                  per_device_hook)[0] for per_device_hook in per_device_hooks
+          ]
+
+        training_hooks = get_hooks_from_the_first_device(
+            grouped_estimator_spec.training_hooks)
+        training_chief_hooks = get_hooks_from_the_first_device(
+            grouped_estimator_spec.training_chief_hooks)
+        estimator_spec = model_fn_lib.EstimatorSpec(
+            mode=grouped_estimator_spec.mode,
+            loss=loss,
+            train_op=strategy.group(distributed_train_op),
+            training_hooks=training_hooks,
+            training_chief_hooks=training_chief_hooks,
+            scaffold=scaffold)
+        return self._train_with_estimator_spec(estimator_spec, worker_hooks,
+                                               hooks, global_step_tensor,
+                                               saving_listeners)
+
+  def _train_with_estimator_spec_distributed(self, estimator_spec, worker_hooks,
+                                             saving_listener):
+    """Train a model with the given Estimator Spec and Distribution Strategy."""
+    if saving_listener:
+      raise ValueError('Saving listeners are not supported by the current '
+                       'Distribution Strategies.')
+    with training.MonitoredTrainingSession(
+        master=self._config.master,
+        is_chief=self._config.is_chief,
+        checkpoint_dir=self._model_dir,
+        scaffold=estimator_spec.scaffold,
+        hooks=worker_hooks,
+        chief_only_hooks=tuple(estimator_spec.training_chief_hooks),
+        save_checkpoint_secs=self._config.save_checkpoints_secs,
+        save_checkpoint_steps=self._config.save_checkpoints_steps,
+        save_summaries_steps=self._config.save_summary_steps,
+        config=self._session_config,
+        max_wait_secs=self._config.session_creation_timeout_secs,
+        log_step_count_steps=self._config.log_step_count_steps,
+        save_graph_def=self._config.checkpoint_save_graph_def) as mon_sess:
+      loss = None
+      any_step_done = False
+      while not mon_sess.should_stop():
+        _, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss])
+        any_step_done = True
+    if not any_step_done:
+      tf.compat.v1.logging.warn('Training with estimator made no steps. '
+                                'Perhaps input is empty or misspecified.')
+    return loss
+
+  def _train_with_estimator_spec(self, estimator_spec, worker_hooks, hooks,
+                                 global_step_tensor, saving_listeners):
+    """Train a model with the given Estimator Spec."""
+    if (self._warm_start_settings and
+        not tf.train.latest_checkpoint(self._model_dir)):
+      tf.compat.v1.logging.info('Warm-starting with WarmStartSettings: %s' %
+                                (self._warm_start_settings,))
+      tf.compat.v1.train.warm_start(*self._warm_start_settings)
+    # Check if the user created a loss summary, and add one if they didn't.
+    # We assume here that the summary is called 'loss'. If it is not, we will
+    # make another one with the name 'loss' to ensure it shows up in the right
+    # graph in TensorBoard.
+    if not any([
+        x.op.name == 'loss' for x in ops.get_collection(ops.GraphKeys.SUMMARIES)
+    ]):
+      summary.scalar('loss', estimator_spec.loss)
+    ops.add_to_collection(ops.GraphKeys.LOSSES, estimator_spec.loss)
+    worker_hooks.extend(hooks)
+    worker_hooks.append(tf.compat.v1.train.NanTensorHook(estimator_spec.loss))
+    if self._config.log_step_count_steps is not None:
+      worker_hooks.append(
+          tf.compat.v1.train.LoggingTensorHook(
+              {
+                  'loss': estimator_spec.loss,
+                  'step': global_step_tensor
+              },
+              every_n_iter=self._config.log_step_count_steps))
+    worker_hooks.extend(estimator_spec.training_hooks)
+
+    if not (estimator_spec.scaffold.saver or
+            tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.SAVERS)):
+      tf.compat.v1.add_to_collection(
+          tf.compat.v1.GraphKeys.SAVERS,
+          tf.compat.v1.train.Saver(
+              sharded=True,
+              max_to_keep=self._config.keep_checkpoint_max,
+              keep_checkpoint_every_n_hours=(
+                  self._config.keep_checkpoint_every_n_hours),
+              defer_build=True,
+              save_relative_paths=True))
+
+    if (self._config.cluster_spec and type(
+        self._train_distribution).__name__ in ('CollectiveAllReduceStrategy',
+                                               'CollectiveAllReduceStrategyV1',
+                                               'MultiWorkerMirroredStrategy')):
+      return self._train_with_estimator_spec_distributed(
+          estimator_spec, worker_hooks, saving_listeners)
+
+    chief_hooks = []
+    all_hooks = worker_hooks + list(estimator_spec.training_chief_hooks)
+    saver_hooks = [
+        h for h in all_hooks
+        if isinstance(h, tf.compat.v1.train.CheckpointSaverHook)
+    ]
+    if (self._config.save_checkpoints_secs or
+        self._config.save_checkpoints_steps):
+      if not saver_hooks:
+        chief_hooks = [
+            tf.compat.v1.train.CheckpointSaverHook(
+                self._model_dir,
+                save_secs=self._config.save_checkpoints_secs,
+                save_steps=self._config.save_checkpoints_steps,
+                scaffold=estimator_spec.scaffold,
+                save_graph_def=self._config.checkpoint_save_graph_def)
+        ]
+        saver_hooks = [chief_hooks[0]]
+    if saving_listeners:
+      if not saver_hooks:
+        raise ValueError(
+            'There should be a CheckpointSaverHook to use saving_listeners. '
+            'Please set either RunConfig.save_checkpoints_steps or '
+            'RunConfig.save_checkpoints_secs.')
+      else:
+        # We expect exactly one CheckpointSaverHook; if there are multiple, we
+        # pick the first one to attach the listeners.
+        for listener in saving_listeners:
+          # pylint: disable=protected-access
+          if listener not in saver_hooks[0]._listeners:
+            saver_hooks[0]._listeners.append(listener)
+          # pylint: enable=protected-access
+
+    # Add summary hooks to worker 0 if we are running with a master, to ensure
+    # that summaries are written at correct intervals even with long-running
+    # evaluations.
+    save_summary_steps = self._config.save_summary_steps
+    log_step_count_steps = self._config.log_step_count_steps
+
+    # Check existence of appropriate cluster spec fields, as well as master and
+    # worker nodes. As master also performs evaluation, summary writing must
+    # occur on a different node. The presence of a worker is also checked to
+    # prevent reassigning hooks for single-replica jobs with just a master node.
+    if (self._config.cluster_spec and self._config.cluster_spec.jobs and
+        (run_config.TaskType.WORKER in self._config.cluster_spec.jobs) and
+        (run_config.TaskType.MASTER in self._config.cluster_spec.jobs)):
+      # Update config values to prevent the default hooks from being created on
+      # the master or other workers.
+      save_summary_steps = 0
+      log_step_count_steps = None
+
+      if (self._config.task_type == run_config.TaskType.WORKER and
+          self._config.task_id == 0):
+        if (self._config.save_summary_steps and
+            self._config.save_summary_steps > 0):
+          worker_hooks.append(
+              tf.compat.v1.train.SummarySaverHook(
+                  save_steps=self._config.save_summary_steps,
+                  output_dir=self._config.model_dir,
+                  scaffold=estimator_spec.scaffold))
+
+        if (self._config.log_step_count_steps and
+            self._config.log_step_count_steps > 0):
+          worker_hooks.append(
+              tf.compat.v1.train.StepCounterHook(
+                  every_n_steps=self._config.log_step_count_steps,
+                  output_dir=self._config.model_dir))
+
+    with training.MonitoredTrainingSession(
+        master=self._config.master,
+        is_chief=self._config.is_chief,
+        checkpoint_dir=self._model_dir,
+        scaffold=estimator_spec.scaffold,
+        hooks=worker_hooks,
+        chief_only_hooks=(tuple(chief_hooks) +
+                          tuple(estimator_spec.training_chief_hooks)),
+        save_checkpoint_secs=0,  # Saving is handled by a hook.
+        save_summaries_steps=save_summary_steps,
+        config=self._session_config,
+        max_wait_secs=self._config.session_creation_timeout_secs,
+        log_step_count_steps=log_step_count_steps,
+        save_graph_def=self._config.checkpoint_save_graph_def) as mon_sess:
+      loss = None
+      any_step_done = False
+      while not mon_sess.should_stop():
+        _, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss])
+        any_step_done = True
+    if not any_step_done:
+      tf.compat.v1.logging.warn('Training with estimator made no steps. '
+                                'Perhaps input is empty or misspecified.')
+    return loss
+
+  def _evaluate_build_graph(self, input_fn, hooks=None, checkpoint_path=None):
+    """Builds the graph and related hooks to run evaluation."""
+    tf.compat.v1.random.set_random_seed(self._config.tf_random_seed)
+    self._create_and_assert_global_step(tf.compat.v1.get_default_graph())
+
+    if self._eval_distribution:
+      (scaffold, evaluation_hooks, input_hooks, update_op, eval_dict) = (
+          self._call_model_fn_eval_distributed(input_fn, self.config))
+    else:
+      (scaffold, evaluation_hooks, input_hooks, update_op, eval_dict) = (
+          self._call_model_fn_eval(input_fn, self.config))
+
+    global_step_tensor = tf.compat.v1.train.get_global_step(
+        tf.compat.v1.get_default_graph())
+    # Call to warm_start has to be after model_fn is called.
+    self._maybe_warm_start(checkpoint_path)
+
+    if tf.compat.v1.GraphKeys.GLOBAL_STEP in eval_dict:
+      raise ValueError(
+          'Metric with name `global_step` is not allowed, because Estimator '
+          'already defines a default metric with the same name.')
+    eval_dict[tf.compat.v1.GraphKeys.GLOBAL_STEP] = global_step_tensor
+
+    all_hooks = list(input_hooks)
+    all_hooks.extend(hooks)
+    all_hooks.extend(list(evaluation_hooks or []))
+    # New local variables have been added, so update the estimator spec's
+    # local init op if it was defined.
+    if scaffold and scaffold.local_init_op:
+      # Ensure that eval step has been created before updating local init op.
+      evaluation._get_or_create_eval_step()  # pylint: disable=protected-access
+
+      scaffold = tf.compat.v1.train.Scaffold(
+          local_init_op=tf.group(
+              scaffold.local_init_op,
+              tf.compat.v1.train.Scaffold.default_local_init_op()),
+          copy_from_scaffold=scaffold)
+
+    return scaffold, update_op, eval_dict, all_hooks
+
+  def _call_model_fn_eval(self, input_fn, config):
+    """Call model_fn for evaluation and handle return values."""
+    features, labels, input_hooks = self._get_features_and_labels_from_input_fn(
+        input_fn, ModeKeys.EVAL)
+
+    estimator_spec = self._call_model_fn(features, labels, ModeKeys.EVAL,
+                                         config)
+    eval_metric_ops = _verify_and_create_loss_metric(
+        estimator_spec.eval_metric_ops, estimator_spec.loss)
+    update_op, eval_dict = _extract_metric_update_ops(eval_metric_ops)
+    return (estimator_spec.scaffold, estimator_spec.evaluation_hooks,
+            input_hooks, update_op, eval_dict)
+
+  def _call_model_fn_eval_distributed(self, input_fn, config):
+    """Call model_fn in distribution mode and handle return values."""
+
+    iterator, input_hooks = self._get_iterator_from_input_fn(
+        input_fn, ModeKeys.EVAL, self._eval_distribution)
+
+    is_tpu_strategy = (
+        self._eval_distribution.__class__.__name__.startswith('TPUStrategy'))
+
+    if is_tpu_strategy:
+      steps_per_run_variable = training.get_or_create_steps_per_run_variable()
+
+      def step_fn(ctx, inputs):
+        """Runs one step of the eval computation and captures outputs."""
+        if isinstance(inputs, tuple):
+          features, labels = inputs
+        else:
+          features = inputs
+          labels = None
+        estimator_spec = self._eval_distribution.extended.call_for_each_replica(
+            self._call_model_fn, args=(features, labels, ModeKeys.EVAL, config))
+        eval_metric_ops = _verify_and_create_loss_metric(
+            estimator_spec.eval_metric_ops, estimator_spec.loss,
+            self._eval_distribution)
+        update_op, eval_dict = _extract_metric_update_ops(
+            eval_metric_ops, self._eval_distribution)
+        ctx.set_non_tensor_output(name='estimator_spec', output=estimator_spec)
+        ctx.set_non_tensor_output(name='eval_dict', output=eval_dict)
+        return update_op
+
+      # TODO(priyag): Fix eval step hook to account for steps_per_run.
+      ctx = self._eval_distribution.extended.experimental_run_steps_on_iterator(
+          step_fn, iterator, iterations=steps_per_run_variable)
+      update_op = ctx.run_op
+      eval_dict = ctx.non_tensor_outputs['eval_dict']
+      grouped_estimator_spec = ctx.non_tensor_outputs['estimator_spec']
+    else:
+      features, labels = estimator_util.parse_iterator_result(
+          iterator.get_next())
+      grouped_estimator_spec = (
+          self._eval_distribution.extended.call_for_each_replica(
+              self._call_model_fn,
+              args=(features, labels, ModeKeys.EVAL, config)))
+      eval_metric_ops = _verify_and_create_loss_metric(
+          grouped_estimator_spec.eval_metric_ops, grouped_estimator_spec.loss,
+          self._eval_distribution)
+      update_op, eval_dict = _extract_metric_update_ops(eval_metric_ops,
+                                                        self._eval_distribution)
+
+    scaffold = _combine_distributed_scaffold(grouped_estimator_spec.scaffold,
+                                             self._eval_distribution)
+
+    def get_hooks_from_the_first_device(per_device_hooks):
+      return [
+          self._eval_distribution.experimental_local_results(per_device_hook)[0]
+          for per_device_hook in per_device_hooks
+      ]
+
+    evaluation_hooks = get_hooks_from_the_first_device(
+        grouped_estimator_spec.evaluation_hooks)
+
+    return (scaffold, evaluation_hooks, input_hooks, update_op, eval_dict)
+
+  def _evaluate_run(self, checkpoint_path, scaffold, update_op, eval_dict,
+                    all_hooks, output_dir):
+    """Run evaluation."""
+    eval_results = evaluation._evaluate_once(  # pylint: disable=protected-access
+        checkpoint_path=checkpoint_path,
+        master=self._config.evaluation_master,
+        scaffold=scaffold,
+        eval_ops=update_op,
+        final_ops=eval_dict,
+        hooks=all_hooks,
+        config=self._session_config)
+
+    current_global_step = eval_results[tf.compat.v1.GraphKeys.GLOBAL_STEP]
+
+    _write_dict_to_summary(
+        output_dir=output_dir,
+        dictionary=eval_results,
+        current_global_step=current_global_step)
+
+    if checkpoint_path:
+      _write_checkpoint_path_to_summary(
+          output_dir=output_dir,
+          checkpoint_path=checkpoint_path,
+          current_global_step=current_global_step)
+
+    return eval_results
+
+  def _maybe_warm_start(self, checkpoint_path):
+    if not checkpoint_path and self._warm_start_settings:
+      tf.compat.v1.logging.info('Warm-starting with WarmStartSettings: %s' %
+                                (self._warm_start_settings,))
+      tf.compat.v1.train.warm_start(*self._warm_start_settings)
+
+  @deprecation.deprecated(
+      None, 'This function has been renamed, use `export_saved_model` instead.')
+  def export_savedmodel(self,
+                        export_dir_base,
+                        serving_input_receiver_fn,
+                        assets_extra=None,
+                        as_text=False,
+                        checkpoint_path=None,
+                        strip_default_attrs=False):
+    # pylint: disable=line-too-long
+    """Exports inference graph as a `SavedModel` into the given dir.
+
+    For a detailed guide, see [SavedModel from Estimators](
+    https://www.tensorflow.org/guide/estimator#savedmodels_from_estimators).
+
+    This method builds a new graph by first calling the
+    `serving_input_receiver_fn` to obtain feature `Tensor`s, and then calling
+    this `Estimator`'s `model_fn` to generate the model graph based on those
+    features. It restores the given checkpoint (or, lacking that, the most
+    recent checkpoint) into this graph in a fresh session.  Finally it creates
+    a timestamped export directory below the given `export_dir_base`, and writes
+    a `SavedModel` into it containing a single `tf.MetaGraphDef` saved from this
+    session.
+
+    The exported `MetaGraphDef` will provide one `SignatureDef` for each
+    element of the `export_outputs` dict returned from the `model_fn`, named
+    using the same keys.  One of these keys is always
+    `tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY`,
+    indicating which signature will be served when a serving request does not
+    specify one. For each signature, the outputs are provided by the
+    corresponding `tf.estimator.export.ExportOutput`s, and the inputs are always
+    the input receivers provided by the `serving_input_receiver_fn`.
+
+    Extra assets may be written into the `SavedModel` via the `assets_extra`
+    argument.  This should be a dict, where each key gives a destination path
+    (including the filename) relative to the assets.extra directory.  The
+    corresponding value gives the full path of the source file to be copied.
+    For example, the simple case of copying a single file without renaming it
+    is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
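+
+    A minimal usage sketch with the renamed `export_saved_model` (the
+    `estimator` object, feature spec, and paths below are illustrative
+    placeholders, not part of this API):
+
+    ```
+    feature_spec = {'x': tf.io.FixedLenFeature([1], dtype=tf.float32)}
+    serving_input_receiver_fn = (
+        tf.estimator.export.build_parsing_serving_input_receiver_fn(
+            feature_spec))
+    export_dir = estimator.export_saved_model('/tmp/exports',
+                                              serving_input_receiver_fn)
+    ```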
+
+    Args:
+      export_dir_base: A string containing a directory in which to create
+        timestamped subdirectories containing exported `SavedModel`s.
+      serving_input_receiver_fn: A function that takes no arguments and
+        returns a `tf.estimator.export.ServingInputReceiver` or
+        `tf.estimator.export.TensorServingInputReceiver`.
+      assets_extra: A dict specifying how to populate the assets.extra directory
+        within the exported `SavedModel`, or `None` if no extra assets are
+        needed.
+      as_text: whether to write the `SavedModel` proto in text format.
+      checkpoint_path: The checkpoint path to export.  If `None` (the default),
+        the most recent checkpoint found within the model directory is chosen.
+      strip_default_attrs: Boolean. If `True`, default-valued attributes will be
+        removed from the `NodeDef`s. For a detailed guide, see [Stripping
+        Default-Valued Attributes](
+        https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes).
+
+    Returns:
+      The path to the exported directory as a bytes object.
+
+    Raises:
+      ValueError: if no `serving_input_receiver_fn` is provided, no
+      `export_outputs` are provided, or no checkpoint can be found.
+    """
+    # pylint: enable=line-too-long
+    if not serving_input_receiver_fn:
+      raise ValueError('A serving_input_receiver_fn must be defined.')
+
+    return self._export_all_saved_models(
+        export_dir_base, {ModeKeys.PREDICT: serving_input_receiver_fn},
+        assets_extra=assets_extra,
+        as_text=as_text,
+        checkpoint_path=checkpoint_path,
+        strip_default_attrs=strip_default_attrs)
+
+
+@estimator_export('estimator.Estimator', v1=[])  # pylint: disable=missing-docstring
+class EstimatorV2(Estimator):
+  __doc__ = Estimator.__doc__
+
+  export_savedmodel = deprecation.hide_attribute_from_api(
+      '`Estimator.export_savedmodel` has been deprecated. Please use '
+      '`export_saved_model` instead.')
+
+  def _assert_members_are_not_overridden(self):
+    """Asserts members of `Estimator` are not overridden."""
+    _assert_members_are_not_overridden(EstimatorV2, self)
+
+
+def _get_loss_reduce_op_for_reporting():
+  graph = tf.compat.v1.get_default_graph()
+  if getattr(graph, '_is_loss_scaled_by_optimizer', False):  # pylint: disable=protected-access
+    return tf.compat.v1.distribute.get_loss_reduction()
+  return tf.distribute.ReduceOp.SUM
+
+
+def _assert_members_are_not_overridden(cls, obj):
+  """Assert Estimator methods are not overwritten."""
+  # TPUEstimator is special cased (owned by TF).
+  if obj.__class__.__name__ == 'TPUEstimator':
+    return
+
+  allowed_overrides = set([
+      'model_fn', '_create_and_assert_global_step', '_export_all_saved_models',
+      '_tf_api_names', '_tf_api_names_v1', '_estimator_api_names',
+      '_estimator_api_names_v1', '_estimator_api_constants',
+      '_estimator_api_constants_v1', 'latest_checkpoint'
+  ])
+
+  estimator_members = set([m for m in dir(cls) if not m.startswith('__')])
+  subclass_members = set(obj.__class__.__dict__.keys())
+  common_members = estimator_members & subclass_members - allowed_overrides
+  overridden_members = [
+      m for m in common_members if getattr(cls, m) != getattr(obj.__class__, m)
+  ]
+  if overridden_members:
+    raise ValueError(
+        'Subclasses of Estimator cannot override members of Estimator. '
+        '{} does override {}'.format(obj.__class__, overridden_members))
+
+
+def _verify_and_create_loss_metric(eval_metric_ops, loss, distribution=None):
+  """Creates a metric for loss and throws an error if one already exists."""
+  if model_fn_lib.LOSS_METRIC_KEY in eval_metric_ops:
+    raise ValueError(
+        'Metric with name "%s" is not allowed, because Estimator ' %
+        (model_fn_lib.LOSS_METRIC_KEY) +
+        'already defines a default metric with the same name.')
+
+  if distribution is None:
+    loss_metric = tf.compat.v1.metrics.mean(loss)
+  else:
+    loss_metric = distribution.extended.call_for_each_replica(
+        tf.compat.v1.metrics.mean, args=(loss,))
+  eval_metric_ops[model_fn_lib.LOSS_METRIC_KEY] = loss_metric
+  return eval_metric_ops
+
+
+def maybe_overwrite_model_dir_and_session_config(config, model_dir):
+  """Overwrite estimator config by `model_dir` and `session_config` if needed.
+
+  Args:
+    config: Original estimator config.
+    model_dir: Estimator model checkpoint directory.
+
+  Returns:
+    Overwritten estimator config.
+
+  Raises:
+    ValueError: Model directory inconsistent between `model_dir` and `config`.
+  """
+
+  if config is None:
+    config = run_config.RunConfig()
+    tf.compat.v1.logging.info('Using default config.')
+  if not isinstance(config, run_config.RunConfig):
+    raise ValueError(
+        'config must be an instance of `RunConfig`, but provided %s.' % config)
+
+  if config.session_config is None:
+    session_config = run_config.get_default_session_config()
+    config = run_config.RunConfig.replace(config, session_config=session_config)
+
+  model_dir = compat_internal.path_to_str(model_dir)
+  if model_dir is not None:
+    if (getattr(config, 'model_dir', None) is not None and
+        config.model_dir != model_dir):
+      raise ValueError(
+          '`model_dir` is set in both the constructor and `RunConfig`, but '
+          "different values. In constructor: '{}', in `RunConfig`: "
+          "'{}' ".format(model_dir, config.model_dir))
+  if model_dir:
+    config = run_config.RunConfig.replace(config, model_dir=model_dir)
+  elif getattr(config, 'model_dir', None) is None:
+    model_dir = tempfile.mkdtemp()
+    tf.compat.v1.logging.warn('Using temporary folder as model directory: %s',
+                              model_dir)
+    config = run_config.RunConfig.replace(config, model_dir=model_dir)
+
+  return config
+
+
+def create_per_replica_ready_for_local_init_op(scaffold):
+  """Create a `tf.train.Scaffold.ready_for_local_init_op` inside a replica."""
+  if scaffold.ready_for_local_init_op:
+    return scaffold.ready_for_local_init_op
+
+  def default_ready_for_local_init_op():
+    return tf.compat.v1.report_uninitialized_variables(
+        tf.compat.v1.global_variables())
+
+  return tf.compat.v1.train.Scaffold.get_or_default(
+      'ready_for_local_init_op', tf.compat.v1.GraphKeys.READY_FOR_LOCAL_INIT_OP,
+      default_ready_for_local_init_op)
+
+
+def _combine_distributed_scaffold(grouped_scaffold, distribution):
+  """Combines scaffold(s) returned from `call_for_each_replica`."""
+
+  # TODO(anjalisridhar): Figure out how to resolve the following scaffold
+  # parameters: init_feed_dict, init_fn.
+  scaffold_list = distribution.experimental_local_results(grouped_scaffold)
+  init_feed_dict = [
+      s.init_feed_dict for s in scaffold_list if s.init_feed_dict is not None
+  ]
+  if init_feed_dict:
+    init_feed_dict = distribution.group(init_feed_dict)
+  else:
+    init_feed_dict = None
+
+  init_fn = [
+      s._user_init_fn for s in scaffold_list if s._user_init_fn is not None  # pylint: disable=protected-access
+  ]
+  if init_fn:
+    init_fn = init_fn[0]
+  else:
+    init_fn = None
+
+  init_op = [s.init_op for s in scaffold_list if s.init_op is not None]
+  if init_op:
+    init_op = distribution.group(init_op)
+  else:
+    init_op = None
+
+  def _unwrap_and_concat(value):
+    value = tf.nest.flatten(distribution.experimental_local_results(value))
+    if len(value) != 1:
+      return tf.concat(value, 0)
+    return value[0]
+
+  ready_op = distribution.extended.call_for_each_replica(
+      lambda scaffold: scaffold.ready_op, args=(grouped_scaffold,))
+  if ready_op is not None:
+    ready_op = _unwrap_and_concat(ready_op)
+
+  ready_for_local_init_op = distribution.extended.call_for_each_replica(
+      create_per_replica_ready_for_local_init_op, args=(grouped_scaffold,))
+  if ready_for_local_init_op is not None:
+    ready_for_local_init_op = _unwrap_and_concat(ready_for_local_init_op)
+  else:
+    ready_for_local_init_op = None
+
+  local_init_op = [
+      s.local_init_op for s in scaffold_list if s.local_init_op is not None
+  ]
+  if local_init_op:
+    local_init_op = distribution.group(local_init_op)
+  else:
+    local_init_op = None
+
+  summary_op = [s.summary_op for s in scaffold_list if s.summary_op is not None]
+  if summary_op:
+    summary_op = distribution.group(summary_op)
+  else:
+    summary_op = None
+
+  savers = [s.saver for s in scaffold_list if s.saver is not None]
+  if savers:
+    saver = savers[0]
+  else:
+    saver = None
+
+  scaffold = tf.compat.v1.train.Scaffold(
+      init_op=init_op,
+      ready_op=ready_op,
+      ready_for_local_init_op=ready_for_local_init_op,
+      local_init_op=local_init_op,
+      summary_op=summary_op,
+      saver=saver,
+      init_feed_dict=init_feed_dict,
+      init_fn=init_fn)
+  return scaffold
+
+
+def _check_checkpoint_available(model_dir):
+  latest_path = tf.train.latest_checkpoint(model_dir)
+  if not latest_path:
+    raise ValueError(
+        'Could not find trained model in model_dir: {}.'.format(model_dir))
+
+
+def _check_hooks_type(hooks):
+  """Returns hooks if all are `SessionRunHook`, raises TypeError otherwise."""
+  hooks = list(hooks or [])
+  for h in hooks:
+    if not isinstance(h, tf.compat.v1.train.SessionRunHook):
+      raise TypeError(
+          'All hooks must be SessionRunHook instances, given: {}'.format(h))
+  return hooks
+
+
+def _check_listeners_type(saving_listeners):
+  """Check listeners type."""
+  listeners = list(saving_listeners or [])
+  for l in listeners:
+    if not isinstance(l, tf.compat.v1.train.CheckpointSaverListener):
+      raise TypeError(
+          'saving_listeners must be a list of CheckpointSaverListener, '
+          'given: {}'.format(l))
+  return listeners
+
+
+def _get_replica_device_setter(config):
+  """Creates a replica device setter if required as a default `device_fn`.
+
+  `Estimator` uses `tf.train.ReplicaDeviceSetter` as a default device placer. It
+  sets the distributed related arguments such as number of `ps_replicas` based
+  on given `config`.
+
+  Args:
+    config: A `tf.estimator.RunConfig` instance.
+
+  Returns:
+    A replica device setter, or `None`.
+  """
+  if config.task_type:
+    worker_device = '/job:%s/task:%d' % (config.task_type, config.task_id)
+  else:
+    worker_device = '/job:worker'
+
+  if config.num_ps_replicas > 0:
+    return tf.compat.v1.train.replica_device_setter(
+        ps_tasks=config.num_ps_replicas,
+        worker_device=worker_device,
+        merge_devices=True,
+        ps_ops=list(device_setter.STANDARD_PS_OPS),
+        cluster=config.cluster_spec)
+  else:
+    return None
+
+
+def _verify_model_fn_args(model_fn, params):
+  """Verifies `model_fn` arguments."""
+  args = set(function_utils.fn_args(model_fn))
+  if 'features' not in args:
+    raise ValueError('model_fn (%s) must include features argument.' % model_fn)
+  if params is not None and 'params' not in args:
+    raise ValueError('model_fn (%s) does not include params argument, '
+                     'but params (%s) is passed to Estimator.' %
+                     (model_fn, params))
+  if params is None and 'params' in args:
+    tf.compat.v1.logging.warn(
+        'Estimator\'s model_fn (%s) includes params '
+        'argument, but params are not passed to Estimator.', model_fn)
+  non_valid_args = list(args - _VALID_MODEL_FN_ARGS)
+  if non_valid_args:
+    raise ValueError('model_fn (%s) has the following unexpected args: %s' %
+                     (model_fn, non_valid_args))
+
+
+def _load_global_step_from_checkpoint_dir(checkpoint_dir):
+  try:
+    checkpoint_reader = tf.compat.v1.train.NewCheckpointReader(
+        tf.train.latest_checkpoint(checkpoint_dir))
+    return checkpoint_reader.get_tensor(tf.compat.v1.GraphKeys.GLOBAL_STEP)
+  except:  # pylint: disable=bare-except
+    return 0
+
+
+def _extract_metric_update_ops(eval_dict, distribution=None):
+  """Separate update operations from metric value operations."""
+  update_ops = []
+  value_ops = {}
+  # Sort metrics lexicographically so graph is identical every time.
+  for name, value in sorted(six.iteritems(eval_dict)):
+    value_ops[name] = value[0]
+    update_ops.append(
+        distribution.group(value[1]) if distribution else value[1])
+
+  update_op = tf.group(*update_ops) if update_ops else None
+  return update_op, value_ops
+
+
+def _dict_to_str(dictionary):
+  """Get a `str` representation of a `dict`.
+
+  Args:
+    dictionary: The `dict` to be represented as `str`.
+
+  Returns:
+    A `str` representing the `dictionary`.
+  """
+  return ', '.join('%s = %s' % (k, v)
+                   for k, v in sorted(six.iteritems(dictionary))
+                   if not isinstance(v, six.binary_type))
+
+
+def _write_dict_to_summary(output_dir, dictionary, current_global_step):
+  """Writes a `dict` into summary file in given output directory.
+
+  Args:
+    output_dir: `str`, directory to write the summary file in.
+    dictionary: the `dict` to be written to summary file.
+    current_global_step: `int`, the current global step.
+  """
+  tf.compat.v1.logging.info('Saving dict for global step %d: %s',
+                            current_global_step, _dict_to_str(dictionary))
+  summary_writer = tf.compat.v1.summary.FileWriterCache.get(output_dir)
+  summary_proto = summary_pb2.Summary()
+  for key in dictionary:
+    if dictionary[key] is None:
+      continue
+    if key == 'global_step':
+      continue
+    if (isinstance(dictionary[key], np.float32) or
+        isinstance(dictionary[key], float)):
+      summary_proto.value.add(tag=key, simple_value=float(dictionary[key]))
+    elif (isinstance(dictionary[key], np.int64) or
+          isinstance(dictionary[key], np.int32) or
+          isinstance(dictionary[key], int)):
+      summary_proto.value.add(tag=key, simple_value=int(dictionary[key]))
+    elif isinstance(dictionary[key], six.binary_type):
+      try:
+        summ = summary_pb2.Summary.FromString(dictionary[key])
+        for i, _ in enumerate(summ.value):
+          summ.value[i].tag = '%s/%d' % (key, i)
+        summary_proto.value.extend(summ.value)
+      except message.DecodeError:
+        tf.compat.v1.logging.warn(
+            'Skipping summary for %s, cannot parse string to Summary.', key)
+        continue
+    elif isinstance(dictionary[key], np.ndarray):
+      value = summary_proto.value.add()
+      value.tag = key
+      value.node_name = key
+      tensor_proto = tf.make_tensor_proto(dictionary[key])
+      value.tensor.CopyFrom(tensor_proto)
+      # pylint: disable=line-too-long
+      tf.compat.v1.logging.info(
+          'Summary for np.ndarray is not visible in TensorBoard by default. '
+          'Consider using a TensorBoard plugin for visualization (see '
+          'https://github.com/tensorflow/tensorboard-plugin-example/blob/master/README.md'
+          ' for more information).')
+      # pylint: enable=line-too-long
+    else:
+      tf.compat.v1.logging.warn(
+          'Skipping summary for %s: must be a float, np.float32, np.int64, '
+          'np.int32, int, np.ndarray, or a serialized Summary string.',
+          key)
+  summary_writer.add_summary(summary_proto, current_global_step)
+  summary_writer.flush()
+
+
+def _write_checkpoint_path_to_summary(output_dir, checkpoint_path,
+                                      current_global_step):
+  """Writes `checkpoint_path` into summary file in the given output directory.
+
+  Args:
+    output_dir: `str`, directory to write the summary file in.
+    checkpoint_path: `str`, checkpoint file path to be written to summary file.
+    current_global_step: `int`, the current global step.
+  """
+
+  checkpoint_path_tag = 'checkpoint_path'
+
+  tf.compat.v1.logging.info('Saving \'%s\' summary for global step %d: %s',
+                            checkpoint_path_tag, current_global_step,
+                            checkpoint_path)
+  summary_proto = summary_pb2.Summary()
+  summary_proto.value.add(
+      tag=checkpoint_path_tag,
+      tensor=tf.make_tensor_proto(checkpoint_path, dtype=tf.dtypes.string))
+  summary_writer = tf.compat.v1.summary.FileWriterCache.get(output_dir)
+  summary_writer.add_summary(summary_proto, current_global_step)
+  summary_writer.flush()
+
+
+def _has_dataset_or_queue_runner(maybe_tensor):
+  """Returns `True` if `Dataset` or `QueueRunner` has been used."""
+  # Check TF dataset first. Here, we use a simple algorithm to check the top
+  # level Tensors only, which should be sufficient for most users.
+  tensors = [
+      x for x in tf.nest.flatten(maybe_tensor) if isinstance(x, tf.Tensor)
+  ]
+  if any(t.op.type == 'IteratorGetNext' for t in tensors):
+    return True
+
+  # Now, check queue.
+  # The collection is truthy when queue runners exist; cast to bool so the
+  # return value matches the documented `True`/`False` contract.
+  return bool(tf.compat.v1.get_default_graph().get_collection(
+      tf.compat.v1.GraphKeys.QUEUE_RUNNERS))
+
+
+VocabInfo = tf.compat.v1.train.VocabInfo  # pylint: disable=invalid-name
+estimator_export('estimator.VocabInfo')(VocabInfo)
+
+
+@estimator_export('estimator.WarmStartSettings')
+class WarmStartSettings(
+    collections.namedtuple('WarmStartSettings', [
+        'ckpt_to_initialize_from',
+        'vars_to_warm_start',
+        'var_name_to_vocab_info',
+        'var_name_to_prev_var_name',
+    ])):
+  """Settings for warm-starting in `tf.estimator.Estimators`.
+
+  Example use with the canned `tf.estimator.DNNClassifier`:
+
+  ```
+  emb_vocab_file = tf.feature_column.embedding_column(
+      tf.feature_column.categorical_column_with_vocabulary_file(
+          "sc_vocab_file", "new_vocab.txt", vocab_size=100),
+      dimension=8)
+  emb_vocab_list = tf.feature_column.embedding_column(
+      tf.feature_column.categorical_column_with_vocabulary_list(
+          "sc_vocab_list", vocabulary_list=["a", "b"]),
+      dimension=8)
+  estimator = tf.estimator.DNNClassifier(
+    hidden_units=[128, 64], feature_columns=[emb_vocab_file, emb_vocab_list],
+    warm_start_from=ws)
+  ```
+
+  where `ws` could be defined as:
+
+  Warm-start all weights in the model (input layer and hidden weights).
+  Either the directory or a specific checkpoint can be provided (in the case
+  of the former, the latest checkpoint will be used):
+
+  ```
+  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp")
+  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp/model-1000")
+  ```
+
+  Warm-start only the embeddings (input layer):
+
+  ```
+  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
+                         vars_to_warm_start=".*input_layer.*")
+  ```
+
+  Warm-start all weights but the embedding parameters corresponding to
+  `sc_vocab_file` have a different vocab from the one used in the current
+  model:
+
+  ```
+  vocab_info = tf.estimator.VocabInfo(
+      new_vocab=sc_vocab_file.vocabulary_file,
+      new_vocab_size=sc_vocab_file.vocabulary_size,
+      num_oov_buckets=sc_vocab_file.num_oov_buckets,
+      old_vocab="old_vocab.txt"
+  )
+  ws = WarmStartSettings(
+      ckpt_to_initialize_from="/tmp",
+      var_name_to_vocab_info={
+          "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info
+      })
+  ```
+
+  Warm-start only `sc_vocab_file` embeddings (and no other variables), which
+  have a different vocab from the one used in the current model:
+
+  ```
+  vocab_info = tf.estimator.VocabInfo(
+      new_vocab=sc_vocab_file.vocabulary_file,
+      new_vocab_size=sc_vocab_file.vocabulary_size,
+      num_oov_buckets=sc_vocab_file.num_oov_buckets,
+      old_vocab="old_vocab.txt"
+  )
+  ws = WarmStartSettings(
+      ckpt_to_initialize_from="/tmp",
+      vars_to_warm_start=None,
+      var_name_to_vocab_info={
+          "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info
+      })
+  ```
+
+  Warm-start all weights but the parameters corresponding to `sc_vocab_file`
+  have a different vocab from the one used in current checkpoint, and only
+  100 of those entries were used:
+
+  ```
+  vocab_info = tf.estimator.VocabInfo(
+      new_vocab=sc_vocab_file.vocabulary_file,
+      new_vocab_size=sc_vocab_file.vocabulary_size,
+      num_oov_buckets=sc_vocab_file.num_oov_buckets,
+      old_vocab="old_vocab.txt",
+      old_vocab_size=100
+  )
+  ws = WarmStartSettings(
+      ckpt_to_initialize_from="/tmp",
+      var_name_to_vocab_info={
+          "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info
+      })
+  ```
+
+  Warm-start all weights but the parameters corresponding to `sc_vocab_file`
+  have a different vocab from the one used in current checkpoint and the
+  parameters corresponding to `sc_vocab_list` have a different name from the
+  current checkpoint:
+
+  ```
+  vocab_info = tf.estimator.VocabInfo(
+      new_vocab=sc_vocab_file.vocabulary_file,
+      new_vocab_size=sc_vocab_file.vocabulary_size,
+      num_oov_buckets=sc_vocab_file.num_oov_buckets,
+      old_vocab="old_vocab.txt",
+      old_vocab_size=100
+  )
+  ws = WarmStartSettings(
+      ckpt_to_initialize_from="/tmp",
+      var_name_to_vocab_info={
+          "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info
+      },
+      var_name_to_prev_var_name={
+          "input_layer/sc_vocab_list_embedding/embedding_weights":
+              "old_tensor_name"
+      })
+  ```
+
+  Warm-start all TRAINABLE variables:
+
+  ```
+  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
+                         vars_to_warm_start=".*")
+  ```
+
+  Warm-start all variables (including non-TRAINABLE):
+
+  ```
+  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
+                         vars_to_warm_start=[".*"])
+  ```
+
+  Warm-start non-TRAINABLE variables "v1", "v1/Momentum", and "v2" but not
+  "v2/momentum":
+
+  ```
+  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
+                         vars_to_warm_start=["v1", "v2[^/]"])
+  ```
+
+  Attributes:
+    ckpt_to_initialize_from: [Required] A string specifying the directory with
+      checkpoint file(s) or path to checkpoint from which to warm-start the
+      model parameters.
+    vars_to_warm_start: [Optional] One of the following:
+
+      * A regular expression (string) that captures which variables to
+        warm-start (see tf.compat.v1.get_collection).  This expression will only
+        consider variables in the TRAINABLE_VARIABLES collection -- if you need
+        to warm-start non-TRAINABLE vars (such as optimizer accumulators or
+        batch norm statistics), please use the below option.
+      * A list of strings, each a regex scope provided to
+        tf.compat.v1.get_collection with GLOBAL_VARIABLES (please see
+        tf.compat.v1.get_collection).  For backwards compatibility reasons, this
+        is separate from the single-string argument type.
+      * A list of Variables to warm-start.  If you do not have access to the
+        `Variable` objects at the call site, please use the above option.
+      * `None`, in which case only TRAINABLE variables specified in
+        `var_name_to_vocab_info` will be warm-started.
+
+      Defaults to `'.*'`, which warm-starts all variables in the
+      TRAINABLE_VARIABLES collection. Note that this excludes variables such as
+      accumulators and moving statistics from batch norm.
+    var_name_to_vocab_info: [Optional] Dict of variable names (strings) to
+      `tf.estimator.VocabInfo`. The variable names should be "full" variables,
+      not the names of the partitions.  If not explicitly provided, the variable
+      is assumed to have no (changes to) vocabulary.
+    var_name_to_prev_var_name: [Optional] Dict of variable names (strings) to
+      name of the previously-trained variable in `ckpt_to_initialize_from`. If
+      not explicitly provided, the name of the variable is assumed to be same
+      between previous checkpoint and current model.  Note that this has no
+      effect on the set of variables that is warm-started, and only controls
+      name mapping (use `vars_to_warm_start` for controlling what variables to
+      warm-start).
+  """
+
+  def __new__(cls,
+              ckpt_to_initialize_from,
+              vars_to_warm_start='.*',
+              var_name_to_vocab_info=None,
+              var_name_to_prev_var_name=None):
+    if not ckpt_to_initialize_from:
+      raise ValueError(
+          '`ckpt_to_initialize_from` MUST be set in WarmStartSettings')
+    return super(WarmStartSettings, cls).__new__(
+        cls,
+        ckpt_to_initialize_from,
+        vars_to_warm_start,
+        var_name_to_vocab_info or {},
+        var_name_to_prev_var_name or {},
+    )
+
+
+def _get_default_warm_start_settings(warm_start_from):
+  """Returns default `tf.estimator.WarmStartSettings`.
+
+  Args:
+    warm_start_from: Either a string representing the filepath of a checkpoint
+      or `SavedModel` to initialize from, or an instance of
+      `tf.estimator.WarmStartSettings`.
+
+  Returns:
+    Either None or an instance of `WarmStartSettings`.
+
+  Raises:
+    ValueError: If `warm_start_from` is not `None` but is neither a string nor
+    an instance of `WarmStartSettings`.
+  """
+  if warm_start_from is None:
+    return None
+  if isinstance(warm_start_from, (six.string_types, six.binary_type)):
+    # Infer that this is a SavedModel if export_path +
+    # 'variables/variables.index' exists, and if so, construct the
+    # WarmStartSettings pointing to the variables path
+    # (export_path + 'variables/variables').
+    if tf.compat.v1.gfile.Exists(
+        os.path.join(
+            saved_model_utils.get_variables_dir(warm_start_from),
+            tf.compat.as_text('variables.index'))):
+      tf.compat.v1.logging.info('Warm-starting from a SavedModel')
+      return WarmStartSettings(
+          ckpt_to_initialize_from=saved_model_utils.get_variables_path(
+              warm_start_from))
+    return WarmStartSettings(ckpt_to_initialize_from=warm_start_from)
+  elif isinstance(warm_start_from, WarmStartSettings):
+    return warm_start_from
+  else:
+    raise ValueError('warm_start_from must be a string or a WarmStartSettings; '
+                     'got {} instead.'.format(type(warm_start_from)))
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/estimator_lib.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/estimator_lib.py
new file mode 100644
index 00000000..6ed8102a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/estimator_lib.py
@@ -0,0 +1,74 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Estimator: High level tools for working with models."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+# pylint: disable=unused-import,line-too-long,wildcard-import
+from tensorflow_estimator.python.estimator.canned.baseline import BaselineClassifier
+from tensorflow_estimator.python.estimator.canned.baseline import BaselineEstimator
+from tensorflow_estimator.python.estimator.canned.baseline import BaselineRegressor
+from tensorflow_estimator.python.estimator.canned.boosted_trees import BoostedTreesClassifier
+from tensorflow_estimator.python.estimator.canned.boosted_trees import BoostedTreesRegressor
+from tensorflow_estimator.python.estimator.canned.dnn import dnn_logit_fn_builder
+from tensorflow_estimator.python.estimator.canned.dnn import DNNClassifier
+from tensorflow_estimator.python.estimator.canned.dnn import DNNEstimator
+from tensorflow_estimator.python.estimator.canned.dnn import DNNRegressor
+from tensorflow_estimator.python.estimator.canned.dnn_linear_combined import DNNLinearCombinedClassifier
+from tensorflow_estimator.python.estimator.canned.dnn_linear_combined import DNNLinearCombinedEstimator
+from tensorflow_estimator.python.estimator.canned.dnn_linear_combined import DNNLinearCombinedRegressor
+from tensorflow_estimator.python.estimator.canned.kmeans import KMeansClustering
+from tensorflow_estimator.python.estimator.canned.linear import linear_logit_fn_builder
+from tensorflow_estimator.python.estimator.canned.linear import LinearClassifier
+from tensorflow_estimator.python.estimator.canned.linear import LinearEstimator
+from tensorflow_estimator.python.estimator.canned.linear import LinearRegressor
+from tensorflow_estimator.python.estimator.canned.parsing_utils import classifier_parse_example_spec
+from tensorflow_estimator.python.estimator.canned.parsing_utils import regressor_parse_example_spec
+from tensorflow_estimator.python.estimator.canned.rnn import RNNClassifier
+from tensorflow_estimator.python.estimator.canned.rnn import RNNEstimator
+from tensorflow_estimator.python.estimator.early_stopping import *
+from tensorflow_estimator.python.estimator.estimator import Estimator
+from tensorflow_estimator.python.estimator.estimator import VocabInfo
+from tensorflow_estimator.python.estimator.estimator import WarmStartSettings
+from tensorflow_estimator.python.estimator.export import export_lib as export
+from tensorflow_estimator.python.estimator.exporter import Exporter
+from tensorflow_estimator.python.estimator.exporter import FinalExporter
+from tensorflow_estimator.python.estimator.exporter import LatestExporter
+from tensorflow_estimator.python.estimator.extenders import add_metrics
+from tensorflow_estimator.python.estimator.head.base_head import Head
+from tensorflow_estimator.python.estimator.head.binary_class_head import BinaryClassHead
+from tensorflow_estimator.python.estimator.head.multi_class_head import MultiClassHead
+from tensorflow_estimator.python.estimator.head.multi_head import MultiHead
+from tensorflow_estimator.python.estimator.head.multi_label_head import MultiLabelHead
+from tensorflow_estimator.python.estimator.head.regression_head import LogisticRegressionHead
+from tensorflow_estimator.python.estimator.head.regression_head import PoissonRegressionHead
+from tensorflow_estimator.python.estimator.head.regression_head import RegressionHead
+from tensorflow_estimator.python.estimator.hooks import basic_session_run_hooks
+from tensorflow_estimator.python.estimator.hooks import hooks
+from tensorflow_estimator.python.estimator.hooks import session_run_hook
+from tensorflow_estimator.python.estimator.inputs import inputs
+from tensorflow_estimator.python.estimator.keras import model_to_estimator
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+from tensorflow_estimator.python.estimator.model_fn import call_logit_fn
+from tensorflow_estimator.python.estimator.model_fn import EstimatorSpec
+from tensorflow_estimator.python.estimator.run_config import RunConfig
+from tensorflow_estimator.python.estimator.tpu.tpu_estimator import TPUEstimator
+from tensorflow_estimator.python.estimator.training import EvalSpec
+from tensorflow_estimator.python.estimator.training import train_and_evaluate
+from tensorflow_estimator.python.estimator.training import TrainSpec
+
+# pylint: enable=unused-import,line-too-long,wildcard-import
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export.py
new file mode 100644
index 00000000..23d16a1b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export.py
@@ -0,0 +1,484 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Configuration and utilities for receiving inputs at serving time.
+
+Extends the export utils defined in core TensorFlow.
+
+Please avoid importing this file directly, all of the public functions have
+been exported to export_lib.py.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+
+import six
+import tensorflow as tf
+from tensorflow.python.framework import ops
+from tensorflow.python.saved_model.model_utils import export_utils
+from tensorflow.python.saved_model.model_utils.export_utils import SINGLE_FEATURE_DEFAULT_NAME
+from tensorflow.python.saved_model.model_utils.export_utils import SINGLE_LABEL_DEFAULT_NAME
+from tensorflow.python.saved_model.model_utils.export_utils import SINGLE_RECEIVER_DEFAULT_NAME
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import util
+
+_SINGLE_TENSOR_DEFAULT_NAMES = {
+    'feature': SINGLE_FEATURE_DEFAULT_NAME,
+    'label': SINGLE_LABEL_DEFAULT_NAME,
+    'receiver_tensor': SINGLE_RECEIVER_DEFAULT_NAME,
+    'receiver_tensors_alternative': SINGLE_RECEIVER_DEFAULT_NAME
+}
+
+
+def wrap_and_check_input_tensors(tensors, field_name, allow_int_keys=False):
+  """Ensure that tensors is a dict of str to Tensor mappings.
+
+  Args:
+    tensors: dict of `str` (or `int`s if `allow_int_keys=True`) to `Tensors`, or
+      a single `Tensor`.
+    field_name: name of the member field of `ServingInputReceiver` whose value
+      is being passed to `tensors`.
+    allow_int_keys: If set to true, the `tensors` dict keys may also be `int`s.
+
+  Returns:
+    dict of str to Tensors; this is the original dict if one was passed, or
+    the original tensor wrapped in a dictionary.
+
+  Raises:
+    ValueError: if tensors is None, or has non-string keys,
+      or non-Tensor values.
+  """
+  if tensors is None:
+    raise ValueError('{}s must be defined.'.format(field_name))
+  if not isinstance(tensors, dict):
+    tensors = {_SINGLE_TENSOR_DEFAULT_NAMES[field_name]: tensors}
+  for name, tensor in tensors.items():
+    _check_tensor_key(name, error_label=field_name, allow_ints=allow_int_keys)
+    _check_tensor(tensor, name, error_label=field_name)
+  return tensors
+
+
+def _check_tensor(tensor, name, error_label='feature'):
+  """Check that passed `tensor` is a Tensor or SparseTensor or RaggedTensor."""
+  if not (isinstance(tensor, tf.Tensor) or
+          isinstance(tensor, tf.sparse.SparseTensor) or
+          isinstance(tensor, tf.RaggedTensor)):
+    fmt_name = ' {}'.format(name) if name else ''
+    value_error = ValueError('{}{} must be a Tensor, SparseTensor, or '
+                             'RaggedTensor.'.format(error_label, fmt_name))
+    # NOTE(ericmc): This if-else block is a specific carve-out for
+    # LabeledTensor, which has a `.tensor` attribute and which is
+    # convertible to tf.Tensor via ops.convert_to_tensor.
+    # Allowing all types convertible to tf.Tensor is considered by soergel@
+    # to be too permissive.
+    # TODO(soergel): accept any type convertible to Tensor,
+    # as in cl/193238295 snapshot #6.
+    if hasattr(tensor, 'tensor'):
+      try:
+        ops.convert_to_tensor(tensor)
+      except TypeError:
+        raise value_error
+    else:
+      raise value_error
+
+
+def _check_tensor_key(name, error_label='feature', allow_ints=False):
+  if not isinstance(name, six.string_types):
+    if not allow_ints:
+      raise ValueError('{} keys must be strings: {}.'.format(error_label, name))
+    elif not isinstance(name, six.integer_types):
+      raise ValueError('{} keys must be strings or ints: {}.'.format(
+          error_label, name))
+
+
+@estimator_export('estimator.export.ServingInputReceiver')
+class ServingInputReceiver(
+    collections.namedtuple(
+        'ServingInputReceiver',
+        ['features', 'receiver_tensors', 'receiver_tensors_alternatives'])):
+  """A return type for a serving_input_receiver_fn.
+
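+  For example, a minimal receiver built by hand (the placeholder and feature
+  names below are illustrative):
+
+  ```
+  serialized = tf.compat.v1.placeholder(tf.dtypes.string, shape=[None],
+                                        name='input_example_tensor')
+  feature_spec = {'x': tf.io.FixedLenFeature([1], dtype=tf.float32)}
+  features = tf.compat.v1.io.parse_example(serialized, feature_spec)
+  receiver = ServingInputReceiver(features, {'examples': serialized})
+  ```
+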
+  Attributes:
+    features: A `Tensor`, `SparseTensor`, or dict of string or int to `Tensor`
+      or `SparseTensor`, specifying the features to be passed to the model.
+      Note: if `features` passed is not a dict, it will be wrapped in a dict
+        with a single entry, using 'feature' as the key.  Consequently, the
+        model must accept a feature dict of the form {'feature': tensor}.
+        You may use `TensorServingInputReceiver` if you want the tensor to
+        be passed as is.
+    receiver_tensors: A `Tensor`, `SparseTensor`, or dict of string to `Tensor`
+      or `SparseTensor`, specifying input nodes where this receiver expects to
+      be fed by default.  Typically, this is a single placeholder expecting
+      serialized `tf.Example` protos.
+    receiver_tensors_alternatives: a dict of string to additional groups of
+      receiver tensors, each of which may be a `Tensor`, `SparseTensor`, or dict
+      of string to `Tensor` or `SparseTensor`. These named receiver tensor
+      alternatives generate additional serving signatures, which may be used to
+      feed inputs at different points within the input receiver subgraph.  A
+      typical usage is to allow feeding raw feature `Tensor`s *downstream* of
+      the tf.parse_example() op. Defaults to None.
+  """
+
+  def __new__(cls,
+              features,
+              receiver_tensors,
+              receiver_tensors_alternatives=None):
+    features = wrap_and_check_input_tensors(
+        features, 'feature', allow_int_keys=True)
+
+    receiver_tensors = wrap_and_check_input_tensors(receiver_tensors,
+                                                    'receiver_tensor')
+
+    if receiver_tensors_alternatives is not None:
+      if not isinstance(receiver_tensors_alternatives, dict):
+        raise ValueError(
+            'receiver_tensors_alternatives must be a dict: {}.'.format(
+                receiver_tensors_alternatives))
+      for alternative_name, receiver_tensors_alt in (
+          six.iteritems(receiver_tensors_alternatives)):
+        # Updating dict during iteration is OK in this case.
+        receiver_tensors_alternatives[alternative_name] = (
+            wrap_and_check_input_tensors(receiver_tensors_alt,
+                                         'receiver_tensors_alternative'))
+
+    return super(ServingInputReceiver, cls).__new__(
+        cls,
+        features=features,
+        receiver_tensors=receiver_tensors,
+        receiver_tensors_alternatives=receiver_tensors_alternatives)
+
+
+@estimator_export('estimator.export.TensorServingInputReceiver')
+class TensorServingInputReceiver(
+    collections.namedtuple(
+        'TensorServingInputReceiver',
+        ['features', 'receiver_tensors', 'receiver_tensors_alternatives'])):
+  """A return type for a serving_input_receiver_fn.
+
+  This is for use with models that expect a single `Tensor` or `SparseTensor`
+  as an input feature, as opposed to a dict of features.
+
+  The normal `ServingInputReceiver` always returns a feature dict, even if it
+  contains only one entry, and so can be used only with models that accept such
+  a dict.  For models that accept only a single raw feature, the
+  `serving_input_receiver_fn` provided to `Estimator.export_saved_model()`
+  should return this `TensorServingInputReceiver` instead.  See:
+  https://github.com/tensorflow/tensorflow/issues/11674
+
+  Note that the receiver_tensors and receiver_tensors_alternatives arguments
+  will be automatically converted to the dict representation in either case,
+  because the SavedModel format requires each input `Tensor` to have a name
+  (provided by the dict key).
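+
+  For example, a sketch for a model that consumes one dense feature tensor
+  (the placeholder name and shape are illustrative):
+
+  ```
+  x = tf.compat.v1.placeholder(tf.dtypes.float32, shape=[None, 3], name='x')
+  receiver = TensorServingInputReceiver(features=x,
+                                        receiver_tensors={'x': x})
+  ```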
+
+  Attributes:
+    features: A single `Tensor` or `SparseTensor`, representing the feature to
+      be passed to the model.
+    receiver_tensors: A `Tensor`, `SparseTensor`, or dict of string to `Tensor`
+      or `SparseTensor`, specifying input nodes where this receiver expects to
+      be fed by default.  Typically, this is a single placeholder expecting
+      serialized `tf.Example` protos.
+    receiver_tensors_alternatives: a dict of string to additional groups of
+      receiver tensors, each of which may be a `Tensor`, `SparseTensor`, or dict
+      of string to `Tensor` or `SparseTensor`. These named receiver tensor
+      alternatives generate additional serving signatures, which may be used to
+      feed inputs at different points within the input receiver subgraph.  A
+      typical usage is to allow feeding raw feature `Tensor`s *downstream* of
+      the tf.parse_example() op. Defaults to None.
+  """
+
+  def __new__(cls,
+              features,
+              receiver_tensors,
+              receiver_tensors_alternatives=None):
+    if features is None:
+      raise ValueError('features must be defined.')
+    _check_tensor(features, None)
+
+    receiver = ServingInputReceiver(
+        features=features,
+        receiver_tensors=receiver_tensors,
+        receiver_tensors_alternatives=receiver_tensors_alternatives)
+
+    return super(TensorServingInputReceiver, cls).__new__(
+        cls,
+        features=receiver.features[SINGLE_FEATURE_DEFAULT_NAME],
+        receiver_tensors=receiver.receiver_tensors,
+        receiver_tensors_alternatives=receiver.receiver_tensors_alternatives)
+
+
+class UnsupervisedInputReceiver(ServingInputReceiver):
+  """A return type for a training_input_receiver_fn or eval_input_receiver_fn.
+
+  This differs from SupervisedInputReceiver in that it does not require a set
+  of labels.
+
+  Attributes:
+    features: A `Tensor`, `SparseTensor`, or dict of string to `Tensor` or
+      `SparseTensor`, specifying the features to be passed to the model.
+    receiver_tensors: A `Tensor`, `SparseTensor`, or dict of string to `Tensor`
+      or `SparseTensor`, specifying input nodes where this receiver expects to
+      be fed by default.  Typically, this is a single placeholder expecting
+      serialized `tf.Example` protos.
+  """
+
+  def __new__(cls, features, receiver_tensors):
+    return super(UnsupervisedInputReceiver, cls).__new__(
+        cls,
+        features=features,
+        receiver_tensors=receiver_tensors,
+        receiver_tensors_alternatives=None)
+
+
+class SupervisedInputReceiver(
+    collections.namedtuple('SupervisedInputReceiver',
+                           ['features', 'labels', 'receiver_tensors'])):
+  """A return type for a training_input_receiver_fn or eval_input_receiver_fn.
+
+  This differs from a ServingInputReceiver in that (1) this receiver expects
+  a set of labels to be passed in with features, and (2) this receiver does
+  not support receiver_tensors_alternatives, which are primarily used for
+  serving.
+
+  The expected return values are:
+    features: A `Tensor`, `SparseTensor`, or dict of string or int to `Tensor`
+      or `SparseTensor`, specifying the features to be passed to the model.
+    labels: A `Tensor`, `SparseTensor`, or dict of string or int to `Tensor` or
+      `SparseTensor`, specifying the labels to be passed to the model.
+    receiver_tensors: A `Tensor`, `SparseTensor`, or dict of string to `Tensor`
+      or `SparseTensor`, specifying input nodes where this receiver expects to
+      be fed by default.  Typically, this is a single placeholder expecting
+      serialized `tf.Example` protos.
+  """
+
+  def __new__(cls, features, labels, receiver_tensors):
+    # Both features and labels can be dicts or raw tensors.
+    # wrap_and_check_input_tensors is called here only to validate the tensors.
+    # The wrapped dict that is returned is deliberately discarded.
+    wrap_and_check_input_tensors(features, 'feature', allow_int_keys=True)
+    wrap_and_check_input_tensors(labels, 'label', allow_int_keys=True)
+
+    receiver_tensors = wrap_and_check_input_tensors(receiver_tensors,
+                                                    'receiver_tensor')
+
+    return super(SupervisedInputReceiver, cls).__new__(
+        cls,
+        features=features,
+        labels=labels,
+        receiver_tensors=receiver_tensors)
+
+
+@estimator_export('estimator.export.build_parsing_serving_input_receiver_fn')
+def build_parsing_serving_input_receiver_fn(feature_spec,
+                                            default_batch_size=None):
+  """Build a serving_input_receiver_fn expecting fed tf.Examples.
+
+  Creates a serving_input_receiver_fn that expects a serialized tf.Example fed
+  into a string placeholder.  The function parses the tf.Example according to
+  the provided feature_spec, and returns all parsed Tensors as features.
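+
+  For example, a minimal sketch (the feature spec below is an illustrative
+  placeholder):
+
+  ```
+  feature_spec = {'x': tf.io.FixedLenFeature([1], dtype=tf.float32),
+                  'y': tf.io.VarLenFeature(dtype=tf.dtypes.string)}
+  serving_input_receiver_fn = build_parsing_serving_input_receiver_fn(
+      feature_spec)
+  ```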
+
+  Args:
+    feature_spec: a dict of string to `VarLenFeature`/`FixedLenFeature`.
+    default_batch_size: the number of query examples expected per batch. Leave
+      unset for variable batch size (recommended).
+
+  Returns:
+    A serving_input_receiver_fn suitable for use in serving.
+  """
+
+  def serving_input_receiver_fn():
+    """An input_fn that expects a serialized tf.Example."""
+    serialized_tf_example = tf.compat.v1.placeholder(
+        dtype=tf.dtypes.string,
+        shape=[default_batch_size],
+        name='input_example_tensor')
+    receiver_tensors = {'examples': serialized_tf_example}
+    features = tf.compat.v1.io.parse_example(serialized_tf_example,
+                                             feature_spec)
+    return ServingInputReceiver(features, receiver_tensors)
+
+  return serving_input_receiver_fn
+
+
+def _placeholder_from_tensor(t, default_batch_size=None):
+  """Creates a placeholder that matches the dtype and shape of passed tensor.
+
+  Args:
+    t: Tensor or EagerTensor
+    default_batch_size: the number of query examples expected per batch. Leave
+      unset for variable batch size (recommended).
+
+  Returns:
+    Placeholder that matches the passed tensor.
+  """
+  batch_shape = tf.TensorShape([default_batch_size])
+  shape = batch_shape.concatenate(t.get_shape()[1:])
+
+  # Reuse the feature tensor's op name (t.op.name) for the placeholder,
+  # excluding the index from the tensor's name (t.name):
+  # t.name = "%s:%d" % (t.op.name, t._value_index)
+  try:
+    name = t.op.name
+  except AttributeError:
+    # In Eager mode, tensors don't have ops or names, and while they do have
+    # IDs, those are not maintained across runs. The name here is used
+    # primarily for debugging, and is not critical to the placeholder.
+    # So, in order to make this Eager-compatible, continue with an empty
+    # name if none is available.
+    name = None
+
+  return tf.compat.v1.placeholder(dtype=t.dtype, shape=shape, name=name)
+
+
+def _placeholders_from_receiver_tensors_dict(input_vals,
+                                             default_batch_size=None):
+  return {
+      name: _placeholder_from_tensor(t, default_batch_size)
+      for name, t in input_vals.items()
+  }
+
+
+@estimator_export('estimator.export.build_raw_serving_input_receiver_fn')
+def build_raw_serving_input_receiver_fn(features, default_batch_size=None):
+  """Build a serving_input_receiver_fn expecting feature Tensors.
+
+  Creates a serving_input_receiver_fn that expects all features to be fed
+  directly.
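+
+  For example, a minimal sketch where the model expects a single float feature
+  named 'x' (the name and shape are illustrative):
+
+  ```
+  features = {'x': tf.compat.v1.placeholder(tf.dtypes.float32,
+                                            shape=[None, 1], name='x')}
+  serving_input_receiver_fn = build_raw_serving_input_receiver_fn(features)
+  ```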
+
+  Args:
+    features: a dict of string to `Tensor`.
+    default_batch_size: the number of query examples expected per batch. Leave
+      unset for variable batch size (recommended).
+
+  Returns:
+    A serving_input_receiver_fn.
+  """
+
+  def serving_input_receiver_fn():
+    """A serving_input_receiver_fn that expects features to be fed directly."""
+    receiver_tensors = _placeholders_from_receiver_tensors_dict(
+        features, default_batch_size)
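+    # Note: the same placeholder dict intentionally serves as both the
+    # features passed to the model and the receiver tensors, since raw
+    # features are fed directly.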
+    return ServingInputReceiver(receiver_tensors, receiver_tensors)
+
+  return serving_input_receiver_fn
+
+
+@estimator_export(
+    'estimator.experimental.build_raw_supervised_input_receiver_fn')
+def build_raw_supervised_input_receiver_fn(features,
+                                           labels,
+                                           default_batch_size=None):
+  """Build a supervised_input_receiver_fn for raw features and labels.
+
+  This function wraps tensor placeholders in a supervised_receiver_fn
+  with the expectation that the features and labels appear precisely as
+  the model_fn expects them. Features and labels can therefore be dicts of
+  tensors, or raw tensors.
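+
+  For example, a minimal sketch (the tensor names and shapes are
+  illustrative):
+
+  ```
+  features = {'x': tf.compat.v1.placeholder(tf.dtypes.float32,
+                                            shape=[None, 2])}
+  labels = tf.compat.v1.placeholder(tf.dtypes.int64, shape=[None])
+  input_receiver_fn = build_raw_supervised_input_receiver_fn(features, labels)
+  ```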
+
+  Args:
+    features: a dict of string to `Tensor` or `Tensor`.
+    labels: a dict of string to `Tensor` or `Tensor`.
+    default_batch_size: the number of query examples expected per batch. Leave
+      unset for variable batch size (recommended).
+
+  Returns:
+    A supervised_input_receiver_fn.
+
+  Raises:
+    ValueError: if features and labels have overlapping keys.
+  """
+  # Check for overlapping keys before beginning.
+  try:
+    feat_keys = features.keys()
+  except AttributeError:
+    feat_keys = [SINGLE_RECEIVER_DEFAULT_NAME]
+  try:
+    label_keys = labels.keys()
+  except AttributeError:
+    label_keys = [SINGLE_LABEL_DEFAULT_NAME]
+
+  overlap_keys = set(feat_keys) & set(label_keys)
+  if overlap_keys:
+    raise ValueError('Features and labels must have distinct keys. '
+                     'Found overlapping keys: {}'.format(overlap_keys))
+
+  def supervised_input_receiver_fn():
+    """A receiver_fn that expects pass-through features and labels."""
+    if not isinstance(features, dict):
+      features_cp = _placeholder_from_tensor(features, default_batch_size)
+      receiver_features = {SINGLE_RECEIVER_DEFAULT_NAME: features_cp}
+    else:
+      receiver_features = _placeholders_from_receiver_tensors_dict(
+          features, default_batch_size)
+      features_cp = receiver_features
+
+    if not isinstance(labels, dict):
+      labels_cp = _placeholder_from_tensor(labels, default_batch_size)
+      receiver_labels = {SINGLE_LABEL_DEFAULT_NAME: labels_cp}
+    else:
+      receiver_labels = _placeholders_from_receiver_tensors_dict(
+          labels, default_batch_size)
+      labels_cp = receiver_labels
+
+    receiver_tensors = dict(receiver_features)
+    receiver_tensors.update(receiver_labels)
+    return SupervisedInputReceiver(features_cp, labels_cp, receiver_tensors)
+
+  return supervised_input_receiver_fn
+
+
+def build_supervised_input_receiver_fn_from_input_fn(input_fn, **input_fn_args):
+  """Get a function that returns a SupervisedInputReceiver matching an input_fn.
+
+  Note that this function calls the input_fn in a local graph in order to
+  extract features and labels. Placeholders are then created from those
+  features and labels in the default graph.
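+
+  For example, a minimal sketch (`my_input_fn` and its kwarg are illustrative
+  placeholders):
+
+  ```
+  input_receiver_fn = build_supervised_input_receiver_fn_from_input_fn(
+      my_input_fn, batch_size=32)
+  ```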
+
+  Args:
+    input_fn: An Estimator input_fn, which is a function that returns one of:
+      * A 'tf.data.Dataset' object: Outputs of `Dataset` object must be a tuple
+        (features, labels) with same constraints as below.
+      * A tuple (features, labels): Where `features` is a `Tensor` or a
+        dictionary of string feature name to `Tensor` and `labels` is a `Tensor`
+        or a dictionary of string label name to `Tensor`. Both `features` and
+        `labels` are consumed by `model_fn`. They should satisfy the expectation
+        of `model_fn` from inputs.
+    **input_fn_args: set of kwargs to be passed to the input_fn. Note that these
+      will not be checked or validated here, and any errors raised by the
+      input_fn will be thrown to the top.
+
+  Returns:
+    A function taking no arguments that, when called, returns a
+    SupervisedInputReceiver. This function can be passed in as part of the
+    input_receiver_map when exporting SavedModels from Estimator with multiple
+    modes.
+  """
+  # Wrap the input_fn call in a graph to prevent sullying the default namespace
+  with tf.Graph().as_default():
+    result = input_fn(**input_fn_args)
+    features, labels, _ = util.parse_input_fn_result(result)
+  # Placeholders are created back in the default graph.
+  return build_raw_supervised_input_receiver_fn(features, labels)
+
+
+### Below utilities are specific to SavedModel exports.
+# TODO(kathywu): Rename all references to use the original definition in
+# model_utils, or estimator/export/export_lib.py if other estimator export
+# functions are used.
+build_all_signature_defs = export_utils.build_all_signature_defs
+get_temp_export_dir = export_utils.get_temp_export_dir
+get_timestamped_export_dir = export_utils.get_timestamped_export_dir
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export_lib.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export_lib.py
new file mode 100644
index 00000000..167e6ece
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export_lib.py
@@ -0,0 +1,48 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""All public utility methods for exporting Estimator to SavedModel.
+
+This file includes functions and constants from core (model_utils) and export.py
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+# pylint: disable=unused-import,line-too-long, wildcard-import
+from tensorflow.python.saved_model.model_utils import build_all_signature_defs
+from tensorflow.python.saved_model.model_utils import export_outputs_for_mode
+from tensorflow.python.saved_model.model_utils import EXPORT_TAG_MAP
+from tensorflow.python.saved_model.model_utils import get_export_outputs
+from tensorflow.python.saved_model.model_utils import get_temp_export_dir
+from tensorflow.python.saved_model.model_utils import get_timestamped_export_dir
+from tensorflow.python.saved_model.model_utils import SIGNATURE_KEY_MAP
+from tensorflow.python.saved_model.model_utils.export_output import _SupervisedOutput
+from tensorflow.python.saved_model.model_utils.export_output import ClassificationOutput
+from tensorflow.python.saved_model.model_utils.export_output import EvalOutput
+from tensorflow.python.saved_model.model_utils.export_output import ExportOutput
+from tensorflow.python.saved_model.model_utils.export_output import PredictOutput
+from tensorflow.python.saved_model.model_utils.export_output import RegressionOutput
+from tensorflow.python.saved_model.model_utils.export_output import TrainOutput
+from tensorflow_estimator.python.estimator.export.export import build_parsing_serving_input_receiver_fn
+from tensorflow_estimator.python.estimator.export.export import build_raw_serving_input_receiver_fn
+from tensorflow_estimator.python.estimator.export.export import build_raw_supervised_input_receiver_fn
+from tensorflow_estimator.python.estimator.export.export import build_supervised_input_receiver_fn_from_input_fn
+from tensorflow_estimator.python.estimator.export.export import ServingInputReceiver
+from tensorflow_estimator.python.estimator.export.export import SupervisedInputReceiver
+from tensorflow_estimator.python.estimator.export.export import TensorServingInputReceiver
+from tensorflow_estimator.python.estimator.export.export import UnsupervisedInputReceiver
+from tensorflow_estimator.python.estimator.export.export import wrap_and_check_input_tensors
+# pylint: enable=unused-import,line-too-long, wildcard-import
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export_output.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export_output.py
new file mode 100644
index 00000000..701a57c7
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/export_output.py
@@ -0,0 +1,36 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Classes for different types of export output."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+# pylint: disable=unused-import
+from tensorflow.python.saved_model.model_utils.export_output import _SupervisedOutput
+from tensorflow.python.saved_model.model_utils.export_output import ClassificationOutput
+from tensorflow.python.saved_model.model_utils.export_output import EvalOutput
+from tensorflow.python.saved_model.model_utils.export_output import ExportOutput
+from tensorflow.python.saved_model.model_utils.export_output import PredictOutput
+from tensorflow.python.saved_model.model_utils.export_output import RegressionOutput
+from tensorflow.python.saved_model.model_utils.export_output import TrainOutput
+# pylint: enable=unused-import
+from tensorflow.python.util.tf_export import estimator_export
+
+estimator_export('estimator.export.ExportOutput')(ExportOutput)
+estimator_export('estimator.export.ClassificationOutput')(ClassificationOutput)
+estimator_export('estimator.export.RegressionOutput')(RegressionOutput)
+estimator_export('estimator.export.PredictOutput')(PredictOutput)
+estimator_export('estimator.export.EvalOutput')(EvalOutput)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/function.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/function.py
new file mode 100644
index 00000000..f8382608
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/export/function.py
@@ -0,0 +1,400 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Defines class for wrapping an Estimator model function."""
+# TODO(kathywu): support remaining outputs from the EstimatorSpec.
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import six
+import tensorflow as tf
+from tensorflow.python.eager import def_function
+from tensorflow.python.eager import function
+from tensorflow.python.eager import wrap_function
+from tensorflow.python.framework import func_graph
+from tensorflow.python.saved_model.model_utils import export_utils
+from tensorflow.python.training.tracking import tracking
+from tensorflow.python.util import function_utils
+from tensorflow_estimator.python.estimator import model_fn as model_fn_lib
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+
+class ModelFunction(tracking.AutoTrackable):
+  """A checkpointable ModelFunction object.
+
+  This object stores a global mapping of variables and functions for each mode.
+  """
+
+  def __init__(self, config=None, params=None):
+    self._config = config
+    self._params = params
+    self._functions = {}
+
+    self._variable_holder = wrap_function.VariableHolder(share_variables=True)
+
+    # Add reference to the variable holder's mapping of variables, which is a
+    # trackable object.
+    self._variables_by_name = self._variable_holder.variables
+
+  @staticmethod
+  def from_function(model_fn, all_modes=None, config=None, params=None):
+    """Creates a new ModelFunction object from a model function."""
+    if all_modes is None:
+      all_modes = [ModeKeys.TRAIN, ModeKeys.EVAL, ModeKeys.PREDICT]
+    else:
+      all_modes = list(all_modes)
+
+    obj = ModelFunction(config=config, params=params)
+    for mode in all_modes:
+      obj.add_mode(model_fn, mode)
+    return obj
+
+  @property
+  def variables(self):
+    return self._variables_by_name
+
+  def add_mode(self, fn, mode, input_signature=None):
+    if mode in self._functions:
+      raise ValueError('ModelFunction object already contains a function with'
+                       ' name {}.'.format(mode))
+
+    spec_fn = EstimatorSpecFunction(
+        fn,
+        mode,
+        config=self._config,
+        params=self._params,
+        variable_holder=self._variable_holder,
+        input_signature=input_signature)
+
+    self._functions[mode] = spec_fn
+
+  def train(self, features, labels):
+    return self.call(ModeKeys.TRAIN, features, labels)
+
+  def evaluate(self, features, labels):
+    return self.call(ModeKeys.EVAL, features, labels)
+
+  def predict(self, features):
+    return self.call(ModeKeys.PREDICT, features)
+
+  def call(self, mode, features, labels=None):
+    if mode not in self._functions:
+      raise ValueError(
+          'Mode {} is not defined in the ModelFunction. To add modes,'
+          ' use the `add_mode()` function. Available modes: {}'.format(
+              mode, self._functions.keys()))
+    fn = self._functions[mode]
+    if fn.expects_labels:
+      return fn(features, labels)
+    else:
+      return fn(features)
+
+
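+# A minimal usage sketch (assuming `my_model_fn` follows the standard
+# Estimator model_fn signature; illustrative only, not exercised here):
+#
+#   model = ModelFunction.from_function(my_model_fn)
+#   predictions = model.predict(features)           # PREDICT mode
+#   train_outputs = model.train(features, labels)   # TRAIN mode
+
+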
+def _wrap_and_verify_model_fn(model_fn,
+                              mode=None,
+                              config=None,
+                              params=None,
+                              input_signature=None):
+  """Returns a function that only has only tensor arguments (features, labels).
+
+  Args:
+    model_fn: Model function. Must follow the signature defined in
+      `tf.estimator.Estimator`.
+    mode: Optional string `tf.estimator.ModeKeys`.
+    config: Optional `estimator.RunConfig` object.
+    params: Optional `dict` of hyperparameters.
+    input_signature: Possibly nested TensorSpec of the tensor arguments.
+
+  Returns:
+    tuple of (
+      function that only accepts tensor arguments (features and/or labels),
+      whether the returned function expects a labels argument)
+  """
+  model_fn_lib.verify_model_fn_args(model_fn, params)
+  args = function_utils.fn_args(model_fn)
+  kwargs = {}
+  if 'mode' in args:
+    kwargs['mode'] = mode
+  if 'params' in args:
+    kwargs['params'] = params
+  if 'config' in args:
+    kwargs['config'] = config
+
+  if 'labels' in args:
+    if input_signature is None or len(input_signature) == 2:
+
+      def wrapped_model_fn(features, labels=None):
+        return model_fn(features=features, labels=labels, **kwargs)
+    else:
+
+      def wrapped_model_fn(features):
+        return model_fn(features=features, labels=None, **kwargs)
+  else:
+
+    def wrapped_model_fn(features):
+      return model_fn(features=features, **kwargs)
+
+  return wrapped_model_fn, 'labels' in args
+
+
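+# For illustration (hypothetical `my_model_fn(features, labels, mode)`):
+#
+#   wrapped_fn, expects_labels = _wrap_and_verify_model_fn(
+#       my_model_fn, mode=ModeKeys.TRAIN)
+#   assert expects_labels  # `my_model_fn` declares a `labels` argument.
+#   spec = wrapped_fn(features, labels)  # mode/config/params already bound.
+
+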
+class EstimatorSpecFunction(def_function.Function):
+  """Wraps graph functions defined for a function returning an EstimatorSpec.
+
+  Instances of this class are revivable when attached to a checkpointable
+  object.
+  """
+
+  def __init__(self,
+               fn,
+               mode,
+               config=None,
+               params=None,
+               variable_holder=None,
+               **kwargs):
+    """Initializes an EstimatorSpecFunction.
+
+    Args:
+      fn: Python model function.
+      mode: String mode to run the function.
+      config: RunConfig that is passed to the `config` arg in the function.
+      params: object that is passed to the `params` argument in the function.
+      variable_holder: Optional `wrap_function.VariableHolder` object.
+      **kwargs: Optional keyword arguments to pass to tf.function (e.g.
+        input_signature).
+    """
+    python_function, self.expects_labels = _wrap_and_verify_model_fn(
+        fn,
+        mode=mode,
+        config=config,
+        params=params,
+        input_signature=kwargs.get('input_signature', None))
+    super(EstimatorSpecFunction, self).__init__(python_function, mode, **kwargs)
+    self._variable_holder = variable_holder
+
+  def _defun(self, fn):
+    return _EstimatorSpecFunction(
+        fn,
+        name=self._name,
+        variable_holder=self._variable_holder,
+        input_signature=self.input_signature,
+        autograph=self._autograph,
+        autograph_options=self._experimental_autograph_options)
+
+
+class _EstimatorSpecFunction(function.Function):
+  """Wraps graph functions defined for a function returning an EstimatorSpec.
+
+  This object handles creation of the graph functions.
+  """
+
+  def __init__(self, python_function, name, variable_holder=None, **kwargs):
+    super(_EstimatorSpecFunction, self).__init__(python_function, name,
+                                                 **kwargs)
+    self._variable_holder = variable_holder
+
+  def _create_graph_function(self, args, kwargs, **other_kwargs):
+    _ = other_kwargs
+    wrapped_graph = _EstimatorWrappedGraph(self._variable_holder)
+    return wrapped_graph.wrap_model_fn(
+        self._python_function,
+        self._name,
+        signature=self.input_signature,
+        args=args,
+        kwargs=kwargs)
+
+
+class _EstimatorWrappedGraph(wrap_function.WrappedGraph):
+  """WrappedGraph that handles global step creation and wraps estimator fns."""
+
+  def __init__(self, *args, **kwargs):
+    super(_EstimatorWrappedGraph, self).__init__(*args, **kwargs)
+    # Create global step variable, which may be used by the input and model fns.
+    self._global_step_read_fn = self.wrap_function(
+        self._global_step, signature=[])
+
+    self._concrete_model_fn = None
+
+    # Original EstimatorSpec object returned by the model function. Only tensors
+    # and ops are returned by the concrete model function.
+    self._estimator_spec = None
+
+  def _global_step(self):
+    return tf.compat.v1.train.get_or_create_global_step()
+
+  @property
+  def global_step(self):
+    return self._global_step_read_fn()
+
+  @property
+  def model_fn(self):
+    return self._concrete_model_fn
+
+  @property
+  def estimator_spec(self):
+    if self._concrete_model_fn is None:
+      raise ValueError('Please wrap a model function first.')
+    return self._estimator_spec
+
+  def wrap_model_fn(self,
+                    model_fn,
+                    mode,
+                    args=None,
+                    kwargs=None,
+                    signature=None):
+    """Wraps a model function, and stores the returned estimator spec."""
+    if self._concrete_model_fn is not None:
+      raise ValueError('`wrap_model_fn` should be only called once per graph.')
+
+    def fn(*args, **kwargs):
+      """Returns tensor and op outputs from the returned spec."""
+      ret = model_fn(*args, **kwargs)
+
+      if isinstance(ret, model_fn_lib.EstimatorSpec):
+        self._estimator_spec = ret
+        return _filter_estimator_spec_outputs(ret)
+      return ret
+
+    name = 'model_fn_{}'.format(mode)
+    self._concrete_model_fn = self._wrap_function(fn, args, kwargs, signature,
+                                                  name)
+    return self._concrete_model_fn
+
+  def wrap_input_receiver_fn(self, input_receiver_fn):
+    """Converts an input receiver function to one or more concrete functions.
+
+    Input receiver functions are python functions with no arguments.
+    Placeholders are created within the function and used to receive inputs to
+    the model.
+
+    Which function (or functions) is generated depends on the `InputReceiver`
+    object returned by `input_receiver_fn`.
+
+    Generally, the returned function will have inputs and outputs:
+      input_receiver(**receiver_tensors) --> features
+
+    or (if the InputReceiver returns labels):
+      input_receiver(**receiver_tensors) --> features, labels
+
+    __Alternate Receiver Tensors__
+
+    The InputReceiver may have alternate receiver tensors, in which case
+    additional concrete functions are generated. Example:
+      InputReceiver.receiver_tensors_alternatives = {
+        'alt_input_1': Tensor,
+        'alt_input_2': {
+          'tensor_1': Tensor,
+          'tensor_2': Tensor
+        }
+      }
+
+    This will generate concrete functions:
+      input_receiver_alt_input_1(input) --> features
+      input_receiver_alt_input_2(tensor_1, tensor_2) --> features
+
+    Args:
+      input_receiver_fn: a no-argument function that returns an `InputReceiver`
+        object.
+
+    Returns:
+      A list of tuples of (concrete function, receiver name). The name of the
+      default input receiver is `None`.
+    """
+    ret = [None]
+
+    def fn():
+      ret[0] = input_receiver = input_receiver_fn()
+      features = input_receiver.features
+      labels = getattr(input_receiver, 'labels', None)
+
+      if labels is None:
+        return features
+      return features, labels
+
+    func_graph.func_graph_from_py_func(
+        None,  # Name is unused.
+        self._variable_holder.call_with_variable_creator_scope(fn),
+        args=None,
+        kwargs=None,
+        signature=[],
+        add_control_dependencies=False,
+        func_graph=self.graph)
+
+    functions = []
+    input_receiver = ret[0]
+
+    wrapped_input_receiver_fn = _prune_receiver_tensors(
+        self._wrapped_function,
+        receiver_tensors=input_receiver.receiver_tensors,
+        outputs=self.graph.structured_outputs,
+        name=_input_receiver_fn_name(None))
+    functions.append((wrapped_input_receiver_fn, None))
+
+    receiver_tensors_alternatives = getattr(input_receiver,
+                                            'receiver_tensors_alternatives',
+                                            None)
+
+    if receiver_tensors_alternatives:
+      for receiver_name, receiver_tensors_alt in (
+          six.iteritems(receiver_tensors_alternatives)):
+        receiver_tensors_alt = _canonicalize_receiver_tensors(
+            receiver_tensors_alt)
+        wrapped_input_receiver_fn = _prune_receiver_tensors(
+            self._wrapped_function,
+            receiver_tensors=receiver_tensors_alt,
+            outputs=self.graph.structured_outputs,
+            name=_input_receiver_fn_name(receiver_name))
+        functions.append((wrapped_input_receiver_fn, receiver_name))
+    return functions
+
+
+def _filter_estimator_spec_outputs(spec):
+  """Filters tensors and ops from an EstimatorSpec and returns a dictionary."""
+  # TODO(kathywu): Add loss, export outputs, eval metrics depending on the mode.
+  if spec.mode == ModeKeys.TRAIN:
+    return dict(predictions=spec.predictions, train_op=spec.train_op)
+  return dict(predictions=spec.predictions)
+
+
+_RECEIVER_FN_NAME = '_input_receiver'
+
+
+def _canonicalize_receiver_tensors(receiver_tensors):
+  """Converts receiver tensors to the expected format of `as_signature_def`."""
+  # TODO(b/129646028): Wrap function doesn't support composite tensors.
+  for tensor in tf.nest.flatten(receiver_tensors):
+    if not isinstance(tensor, tf.Tensor):
+      raise ValueError('All receiver tensors must be tensors (composite '
+                       'tensors are not yet supported).')
+
+  if isinstance(receiver_tensors, dict):
+    return receiver_tensors
+  return {export_utils.SINGLE_RECEIVER_DEFAULT_NAME: receiver_tensors}
+
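+# For illustration: a bare Tensor is wrapped in a single-entry dict keyed by
+# `export_utils.SINGLE_RECEIVER_DEFAULT_NAME`, while dicts pass through:
+#
+#   x = tf.compat.v1.placeholder(tf.float32, [None, 3])
+#   _canonicalize_receiver_tensors(x)         # {SINGLE_RECEIVER_DEFAULT_NAME: x}
+#   _canonicalize_receiver_tensors({'a': x})  # {'a': x}
+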
+
+def _input_receiver_fn_name(name):
+  if name is None:
+    return _RECEIVER_FN_NAME
+  else:
+    return '{}_{}'.format(_RECEIVER_FN_NAME, name)
+
+
+def _prune_receiver_tensors(wrapped_function, receiver_tensors, outputs, name):
+  inputs = _canonicalize_receiver_tensors(receiver_tensors)
+  return wrapped_function.prune(
+      inputs,
+      outputs,
+      name=name,
+      input_signature=(None, func_graph.convert_structure_to_signature(inputs)))
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/exporter.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/exporter.py
new file mode 100644
index 00000000..cf7aa71d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/exporter.py
@@ -0,0 +1,509 @@
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""`Exporter` class represents different flavors of model export."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import abc
+import os
+import tensorflow as tf
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import gc
+from tensorflow_estimator.python.estimator import util
+from tensorflow_estimator.python.estimator.canned import metric_keys
+
+
+@estimator_export('estimator.Exporter')
+class Exporter(object):
+  """A class representing a type of model export."""
+
+  @abc.abstractproperty
+  def name(self):
+    """Directory name.
+
+    A directory name under the export base directory where exports of
+    this type are written.  Should not be `None` nor empty.
+    """
+    pass
+
+  @abc.abstractmethod
+  def export(self, estimator, export_path, checkpoint_path, eval_result,
+             is_the_final_export):
+    """Exports the given `Estimator` to a specific format.
+
+    Args:
+      estimator: the `Estimator` to export.
+      export_path: A string containing a directory where to write the export.
+      checkpoint_path: The checkpoint path to export.
+      eval_result: The output of `Estimator.evaluate` on this checkpoint.
+      is_the_final_export: This boolean is True when this is an export at the
+        end of training.  It is False for the intermediate exports during
+        training. When passing `Exporter` to `tf.estimator.train_and_evaluate`,
+        `is_the_final_export` is always False if `TrainSpec.max_steps` is
+        `None`.
+
+    Returns:
+      The string path to the exported directory or `None` if export is skipped.
+    """
+    pass
+
+
+class _SavedModelExporter(Exporter):
+  """This class exports the serving graph and checkpoints.
+
+     This class provides basic exporting functionality and serves as a
+     foundation for specialized `Exporter`s.
+  """
+
+  def __init__(self,
+               name,
+               serving_input_receiver_fn,
+               assets_extra=None,
+               as_text=False):
+    """Create an `Exporter` to use with `tf.estimator.EvalSpec`.
+
+    Args:
+      name: unique name of this `Exporter` that is going to be used in the
+        export path.
+      serving_input_receiver_fn: a function that takes no arguments and returns
+        a `ServingInputReceiver`.
+      assets_extra: An optional dict specifying how to populate the assets.extra
+        directory within the exported SavedModel.  Each key should give the
+        destination path (including the filename) relative to the assets.extra
+        directory.  The corresponding value gives the full path of the source
+        file to be copied.  For example, the simple case of copying a single
+        file without renaming it is specified as
+        `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
+      as_text: whether to write the SavedModel proto in text format. Defaults to
+        `False`.
+
+    Raises:
+      ValueError: if any argument is invalid.
+    """
+    self._name = name
+    self._serving_input_receiver_fn = serving_input_receiver_fn
+    self._assets_extra = assets_extra
+    self._as_text = as_text
+
+  @property
+  def name(self):
+    return self._name
+
+  def export(self, estimator, export_path, checkpoint_path, eval_result,
+             is_the_final_export):
+    del is_the_final_export
+
+    export_result = estimator.export_saved_model(
+        export_path,
+        self._serving_input_receiver_fn,
+        assets_extra=self._assets_extra,
+        as_text=self._as_text,
+        checkpoint_path=checkpoint_path)
+
+    return export_result
+
+
+def _loss_smaller(best_eval_result, current_eval_result):
+  """Compares two evaluation results and returns true if the 2nd one is smaller.
+
+  Both evaluation results should have the values for MetricKeys.LOSS, which are
+  used for comparison.
+
+  Args:
+    best_eval_result: best eval metrics.
+    current_eval_result: current eval metrics.
+
+  Returns:
+    True if the loss of current_eval_result is smaller; otherwise, False.
+
+  Raises:
+    ValueError: If input eval result is None or no loss is available.
+  """
+  default_key = metric_keys.MetricKeys.LOSS
+  if not best_eval_result or default_key not in best_eval_result:
+    raise ValueError(
+        'best_eval_result cannot be empty or no loss is found in it.')
+
+  if not current_eval_result or default_key not in current_eval_result:
+    raise ValueError(
+        'current_eval_result cannot be empty or no loss is found in it.')
+
+  return best_eval_result[default_key] > current_eval_result[default_key]
+
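+# For illustration (hand-made eval dicts), `_loss_smaller` treats a lower
+# loss as better:
+#
+#   best = {metric_keys.MetricKeys.LOSS: 0.52}
+#   current = {metric_keys.MetricKeys.LOSS: 0.31}
+#   assert _loss_smaller(best, current)  # current's loss is smaller.
+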
+
+def _verify_compare_fn_args(compare_fn):
+  """Verifies compare_fn arguments."""
+  args = set(util.fn_args(compare_fn))
+  if 'best_eval_result' not in args:
+    raise ValueError('compare_fn (%s) must include best_eval_result argument.' %
+                     compare_fn)
+  if 'current_eval_result' not in args:
+    raise ValueError(
+        'compare_fn (%s) must include current_eval_result argument.' %
+        compare_fn)
+  non_valid_args = list(args - set(['best_eval_result', 'current_eval_result']))
+  if non_valid_args:
+    raise ValueError('compare_fn (%s) has the following unexpected args: %s' %
+                     (compare_fn, non_valid_args))
+
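+# A custom compare_fn must accept exactly `best_eval_result` and
+# `current_eval_result`, e.g. an accuracy-based comparison (illustrative):
+#
+#   def _accuracy_bigger(best_eval_result, current_eval_result):
+#     return (current_eval_result['accuracy'] >
+#             best_eval_result['accuracy'])
+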
+
+@estimator_export('estimator.BestExporter')
+class BestExporter(Exporter):
+  """This class exports the serving graph and checkpoints of the best models.
+
+  This class performs a model export every time the new model is better than
+  any existing model.
+  """
+
+  def __init__(self,
+               name='best_exporter',
+               serving_input_receiver_fn=None,
+               event_file_pattern='eval/*.tfevents.*',
+               compare_fn=_loss_smaller,
+               assets_extra=None,
+               as_text=False,
+               exports_to_keep=5):
+    """Create an `Exporter` to use with `tf.estimator.EvalSpec`.
+
+    Example of creating a BestExporter for training and evaluation:
+
+    ```python
+    def make_train_and_eval_fn():
+      # Set up feature columns.
+      categorical_feature_a = (
+          tf.feature_column.categorical_column_with_hash_bucket(...))
+      categorical_feature_a_emb = embedding_column(
+          categorical_column=categorical_feature_a, ...)
+      ...  # other feature columns
+
+      estimator = tf.estimator.DNNClassifier(
+          config=tf.estimator.RunConfig(
+              model_dir='/my_model', save_summary_steps=100),
+          feature_columns=[categorical_feature_a_emb, ...],
+          hidden_units=[1024, 512, 256])
+
+      serving_feature_spec = tf.feature_column.make_parse_example_spec(
+          categorical_feature_a_emb)
+      serving_input_receiver_fn = (
+          tf.estimator.export.build_parsing_serving_input_receiver_fn(
+          serving_feature_spec))
+
+      exporter = tf.estimator.BestExporter(
+          name="best_exporter",
+          serving_input_receiver_fn=serving_input_receiver_fn,
+          exports_to_keep=5)
+
+      train_spec = tf.estimator.TrainSpec(...)
+
+      eval_spec = [tf.estimator.EvalSpec(
+        input_fn=eval_input_fn,
+        steps=100,
+        exporters=exporter,
+        start_delay_secs=0,
+        throttle_secs=5)]
+
+      tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
+
+    ```
+
+    Args:
+      name: unique name of this `Exporter` that is going to be used in the
+        export path.
+      serving_input_receiver_fn: a function that takes no arguments and returns
+        a `ServingInputReceiver`.
+      event_file_pattern: event file name pattern relative to model_dir. If
+        None, the exporter is not preemption-safe; to be preemption-safe,
+        event_file_pattern must be specified.
+      compare_fn: a function that compares two evaluation results and returns
+        true if current evaluation result is better. Follows the signature:
+        * Args:
+          * `best_eval_result`: This is the evaluation result of the best model.
+          * `current_eval_result`: This is the evaluation result of current
+            candidate model.
+        * Returns: True if current evaluation result is better; otherwise,
+          False.
+      assets_extra: An optional dict specifying how to populate the assets.extra
+        directory within the exported SavedModel.  Each key should give the
+        destination path (including the filename) relative to the assets.extra
+        directory.  The corresponding value gives the full path of the source
+        file to be copied.  For example, the simple case of copying a single
+        file without renaming it is specified as `{'my_asset_file.txt':
+          '/path/to/my_asset_file.txt'}`.
+      as_text: whether to write the SavedModel proto in text format. Defaults to
+        `False`.
+      exports_to_keep: Number of exports to keep.  Older exports will be
+        garbage-collected.  Defaults to 5.  Set to `None` to disable garbage
+        collection.
+
+    Raises:
+      ValueError: if any argument is invalid.
+    """
+    self._compare_fn = compare_fn
+    if self._compare_fn is None:
+      raise ValueError('`compare_fn` must not be None.')
+    _verify_compare_fn_args(self._compare_fn)
+
+    self._saved_model_exporter = _SavedModelExporter(name,
+                                                     serving_input_receiver_fn,
+                                                     assets_extra, as_text)
+
+    self._event_file_pattern = event_file_pattern
+    self._model_dir = None
+    self._best_eval_result = None
+    self._has_exported = False
+
+    self._exports_to_keep = exports_to_keep
+    if exports_to_keep is not None and exports_to_keep <= 0:
+      raise ValueError(
+          '`exports_to_keep`, if provided, must be a positive number. Got %s' %
+          exports_to_keep)
+
+  @property
+  def name(self):
+    return self._saved_model_exporter.name
+
+  def export(self, estimator, export_path, checkpoint_path, eval_result,
+             is_the_final_export):
+    export_result = None
+
+    if self._model_dir != estimator.model_dir and self._event_file_pattern:
+      # Loads best metric from event files.
+      tf.compat.v1.logging.info('Loading best metric from event files.')
+
+      self._model_dir = estimator.model_dir
+      full_event_file_pattern = os.path.join(self._model_dir,
+                                             self._event_file_pattern)
+      self._best_eval_result = self._get_best_eval_result(
+          full_event_file_pattern)
+
+    if (self._best_eval_result is None or
+        # check if this is the first export.
+        not self._has_exported or self._compare_fn(
+            best_eval_result=self._best_eval_result,
+            current_eval_result=eval_result)):
+      tf.compat.v1.logging.info('Performing best model export.')
+      self._best_eval_result = eval_result
+      export_result = self._saved_model_exporter.export(estimator, export_path,
+                                                        checkpoint_path,
+                                                        eval_result,
+                                                        is_the_final_export)
+      self._garbage_collect_exports(export_path)
+      self._has_exported = True
+
+    return export_result
+
+  def _garbage_collect_exports(self, export_dir_base):
+    """Deletes older exports, retaining only a given number of the most recent.
+
+    Export subdirectories are assumed to be named with monotonically increasing
+    integers; the most recent are taken to be those with the largest values.
+
+    Args:
+      export_dir_base: the base directory under which each export is in a
+        versioned subdirectory.
+    """
+    if self._exports_to_keep is None:
+      return
+
+    def _export_version_parser(path):
+      # create a simple parser that pulls the export_version from the directory.
+      filename = os.path.basename(path.path)
+      if not (len(filename) == 10 and filename.isdigit()):
+        return None
+      return path._replace(export_version=int(filename))
+
+    # pylint: disable=protected-access
+    keep_filter = gc._largest_export_versions(self._exports_to_keep)
+    delete_filter = gc._negation(keep_filter)
+    for p in delete_filter(
+        gc._get_paths(export_dir_base, parser=_export_version_parser)):
+      try:
+        tf.compat.v1.gfile.DeleteRecursively(p.path)
+      except tf.errors.NotFoundError as e:
+        tf.compat.v1.logging.warn('Cannot delete %s recursively: %s', p.path,
+                                  e)
+    # pylint: enable=protected-access
+
+  def _get_best_eval_result(self, event_files):
+    """Get the best eval result from event files.
+
+    Args:
+      event_files: Absolute pattern of event files.
+
+    Returns:
+      The best eval result.
+    """
+    if not event_files:
+      return None
+
+    best_eval_result = None
+    for event_file in tf.compat.v1.gfile.Glob(event_files):
+      for event in tf.compat.v1.train.summary_iterator(event_file):
+        if event.HasField('summary'):
+          event_eval_result = {}
+          for value in event.summary.value:
+            if value.HasField('simple_value'):
+              event_eval_result[value.tag] = value.simple_value
+          if event_eval_result:
+            if best_eval_result is None or self._compare_fn(
+                best_eval_result, event_eval_result):
+              best_eval_result = event_eval_result
+    return best_eval_result
+
+
+@estimator_export('estimator.FinalExporter')
+class FinalExporter(Exporter):
+  """This class exports the serving graph and checkpoints at the end.
+
+  This class performs a single export at the end of training.
+  """
+
+  def __init__(self,
+               name,
+               serving_input_receiver_fn,
+               assets_extra=None,
+               as_text=False):
+    """Create an `Exporter` to use with `tf.estimator.EvalSpec`.
+
+    Args:
+      name: unique name of this `Exporter` that is going to be used in the
+        export path.
+      serving_input_receiver_fn: a function that takes no arguments and returns
+        a `ServingInputReceiver`.
+      assets_extra: An optional dict specifying how to populate the assets.extra
+        directory within the exported SavedModel.  Each key should give the
+        destination path (including the filename) relative to the assets.extra
+        directory.  The corresponding value gives the full path of the source
+        file to be copied.  For example, the simple case of copying a single
+        file without renaming it is specified as
+        `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
+      as_text: whether to write the SavedModel proto in text format. Defaults to
+        `False`.
+
+    Raises:
+      ValueError: if any argument is invalid.
+    """
+    self._saved_model_exporter = _SavedModelExporter(name,
+                                                     serving_input_receiver_fn,
+                                                     assets_extra, as_text)
+
+  @property
+  def name(self):
+    return self._saved_model_exporter.name
+
+  def export(self, estimator, export_path, checkpoint_path, eval_result,
+             is_the_final_export):
+    if not is_the_final_export:
+      return None
+
+    tf.compat.v1.logging.info(
+        'Performing the final export at the end of training.')
+
+    return self._saved_model_exporter.export(estimator, export_path,
+                                             checkpoint_path, eval_result,
+                                             is_the_final_export)
+
+
+@estimator_export('estimator.LatestExporter')
+class LatestExporter(Exporter):
+  """This class regularly exports the serving graph and checkpoints.
+
+  In addition to exporting, this class also garbage collects stale exports.
+  """
+
+  def __init__(self,
+               name,
+               serving_input_receiver_fn,
+               assets_extra=None,
+               as_text=False,
+               exports_to_keep=5):
+    """Create an `Exporter` to use with `tf.estimator.EvalSpec`.
+
+    Args:
+      name: unique name of this `Exporter` that is going to be used in the
+        export path.
+      serving_input_receiver_fn: a function that takes no arguments and returns
+        a `ServingInputReceiver`.
+      assets_extra: An optional dict specifying how to populate the assets.extra
+        directory within the exported SavedModel.  Each key should give the
+        destination path (including the filename) relative to the assets.extra
+        directory.  The corresponding value gives the full path of the source
+        file to be copied.  For example, the simple case of copying a single
+        file without renaming it is specified as
+        `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
+      as_text: whether to write the SavedModel proto in text format. Defaults to
+        `False`.
+      exports_to_keep: Number of exports to keep.  Older exports will be
+        garbage-collected.  Defaults to 5.  Set to `None` to disable garbage
+        collection.
+
+    Raises:
+      ValueError: if any argument is invalid.
+    """
+    self._saved_model_exporter = _SavedModelExporter(name,
+                                                     serving_input_receiver_fn,
+                                                     assets_extra, as_text)
+    self._exports_to_keep = exports_to_keep
+    if exports_to_keep is not None and exports_to_keep <= 0:
+      raise ValueError(
+          '`exports_to_keep`, if provided, must be a positive number.')
+
+  @property
+  def name(self):
+    return self._saved_model_exporter.name
+
+  def export(self, estimator, export_path, checkpoint_path, eval_result,
+             is_the_final_export):
+    export_result = self._saved_model_exporter.export(estimator, export_path,
+                                                      checkpoint_path,
+                                                      eval_result,
+                                                      is_the_final_export)
+
+    self._garbage_collect_exports(export_path)
+    return export_result
+
+  def _garbage_collect_exports(self, export_dir_base):
+    """Deletes older exports, retaining only a given number of the most recent.
+
+    Export subdirectories are assumed to be named with monotonically increasing
+    integers; the most recent are taken to be those with the largest values.
+
+    Args:
+      export_dir_base: the base directory under which each export is in a
+        versioned subdirectory.
+    """
+    if self._exports_to_keep is None:
+      return
+
+    def _export_version_parser(path):
+      # create a simple parser that pulls the export_version from the directory.
+      filename = os.path.basename(path.path)
+      if not (len(filename) == 10 and filename.isdigit()):
+        return None
+      return path._replace(export_version=int(filename))
+
+    # pylint: disable=protected-access
+    keep_filter = gc._largest_export_versions(self._exports_to_keep)
+    delete_filter = gc._negation(keep_filter)
+    for p in delete_filter(
+        gc._get_paths(export_dir_base, parser=_export_version_parser)):
+      try:
+        tf.compat.v1.gfile.DeleteRecursively(p.path)
+      except tf.errors.NotFoundError as e:
+        tf.compat.v1.logging.warn('Cannot delete %s recursively: %s', p.path,
+                                  e)
+    # pylint: enable=protected-access
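+
+
+# A minimal usage sketch (hypothetical input and receiver fns):
+#
+#   exporter = LatestExporter(
+#       name='latest_exporter',
+#       serving_input_receiver_fn=serving_input_receiver_fn,
+#       exports_to_keep=3)
+#   eval_spec = tf.estimator.EvalSpec(
+#       input_fn=eval_input_fn, exporters=[exporter])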
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/extenders.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/extenders.py
new file mode 100644
index 00000000..07c6fa33
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/extenders.py
@@ -0,0 +1,123 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Extenders of tf.estimator.Estimator."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from tensorflow.python.util import function_utils
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator as estimator_lib
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+_VALID_METRIC_FN_ARGS = set(['features', 'labels', 'predictions', 'config'])
+
+
+@estimator_export('estimator.add_metrics')
+def add_metrics(estimator, metric_fn):
+  """Creates a new `tf.estimator.Estimator` which has given metrics.
+
+  Example:
+
+  ```python
+    def my_auc(labels, predictions):
+      auc_metric = tf.keras.metrics.AUC(name="my_auc")
+      auc_metric.update_state(y_true=labels, y_pred=predictions['logistic'])
+      return {'auc': auc_metric}
+
+    estimator = tf.estimator.DNNClassifier(...)
+    estimator = tf.estimator.add_metrics(estimator, my_auc)
+    estimator.train(...)
+    estimator.evaluate(...)
+  ```
+  Example usage of custom metric which uses features:
+
+  ```python
+    def my_auc(labels, predictions, features):
+      auc_metric = tf.keras.metrics.AUC(name="my_auc")
+      auc_metric.update_state(y_true=labels, y_pred=predictions['logistic'],
+                              sample_weight=features['weight'])
+      return {'auc': auc_metric}
+
+    estimator = tf.estimator.DNNClassifier(...)
+    estimator = tf.estimator.add_metrics(estimator, my_auc)
+    estimator.train(...)
+    estimator.evaluate(...)
+  ```
+
+  Args:
+    estimator: A `tf.estimator.Estimator` object.
+    metric_fn: A function which should obey the following signature:
+      - Args: can only have the following four arguments in any order:
+        * predictions: Predictions `Tensor` or dict of `Tensor` created by the
+          given `estimator`.
+        * features: Input `dict` of `Tensor` objects created by `input_fn` which
+          is given to `estimator.evaluate` as an argument.
+        * labels:  Labels `Tensor` or dict of `Tensor` created by `input_fn`
+          which is given to `estimator.evaluate` as an argument.
+        * config: config attribute of the `estimator`.
+       - Returns: Dict of metric results keyed by name. Final metrics are a
+         union of this and `estimator`'s existing metrics. If there is a name
+         conflict between this and `estimator`'s existing metrics, this will
+         override the existing one. The values of the dict are the results of
+         calling a metric function, namely a `(metric_tensor, update_op)` tuple.
+
+  Returns:
+      A new `tf.estimator.Estimator` which has a union of original metrics with
+        given ones.
+  """
+  _verify_metric_fn_args(metric_fn)
+
+  def new_model_fn(features, labels, mode, config):
+    spec = estimator.model_fn(features, labels, mode, config)
+    if mode != ModeKeys.EVAL:
+      return spec
+    new_metrics = _call_metric_fn(metric_fn, features, labels, spec.predictions,
+                                  config)
+    all_metrics = spec.eval_metric_ops or {}
+    all_metrics.update(new_metrics)
+    return spec._replace(eval_metric_ops=all_metrics)
+
+  return estimator_lib.Estimator(
+      model_fn=new_model_fn,
+      model_dir=estimator.model_dir,
+      config=estimator.config,
+      # pylint: disable=protected-access
+      warm_start_from=estimator._warm_start_settings)
+  # pylint: enable=protected-access
+
+
+def _verify_metric_fn_args(metric_fn):
+  args = set(function_utils.fn_args(metric_fn))
+  invalid_args = list(args - _VALID_METRIC_FN_ARGS)
+  if invalid_args:
+    raise ValueError('metric_fn (%s) has the following unexpected args: %s' %
+                     (metric_fn, invalid_args))
+
+
+def _call_metric_fn(metric_fn, features, labels, predictions, config):
+  """Calls metric fn with proper arguments."""
+  metric_fn_args = function_utils.fn_args(metric_fn)
+  kwargs = {}
+  if 'features' in metric_fn_args:
+    kwargs['features'] = features
+  if 'labels' in metric_fn_args:
+    kwargs['labels'] = labels
+  if 'predictions' in metric_fn_args:
+    kwargs['predictions'] = predictions
+  if 'config' in metric_fn_args:
+    kwargs['config'] = config
+  return metric_fn(**kwargs)
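+
+
+# A minimal sketch of the argument filtering above (hypothetical metric_fn):
+#
+#   def my_metric_fn(labels, predictions):
+#     ...
+#   _call_metric_fn(my_metric_fn, features, labels, predictions, config)
+#   # calls my_metric_fn(labels=labels, predictions=predictions)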
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/gc.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/gc.py
new file mode 100644
index 00000000..891d9df7
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/gc.py
@@ -0,0 +1,217 @@
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+r"""System for specifying garbage collection (GC) of path based data.
+
+This framework allows for GC of data specified by path names, for example files
+on disk.  gc.Path objects each represent a single item stored at a path and may
+be a base directory,
+  /tmp/exports/0/...
+  /tmp/exports/1/...
+  ...
+or a fully qualified file,
+  /tmp/train-1.ckpt
+  /tmp/train-2.ckpt
+  ...
+
+A gc filter function takes and returns a list of gc.Path items.  Filter
+functions are responsible for selecting Path items for preservation or deletion.
+Note that functions should always return a sorted list.
+
+For example,
+  base_dir = "/tmp"
+  # Create the directories.
+  for e in range(10):
+    os.mkdir("%s/%d" % (base_dir, e), 0o755)
+
+  # Create a simple parser that pulls the export_version from the directory.
+  path_regex = "^" + re.escape(base_dir) + "/(\\d+)$"
+  def parser(path):
+    match = re.match(path_regex, path.path)
+    if not match:
+      return None
+    return path._replace(export_version=int(match.group(1)))
+
+  path_list = gc._get_paths("/tmp", parser)  # contains all ten Paths
+
+  every_fifth = gc._mod_export_version(5)
+  print(every_fifth(path_list))  # shows ["/tmp/0", "/tmp/5"]
+
+  largest_three = gc._largest_export_versions(3)
+  print(largest_three(path_list))  # shows ["/tmp/7", "/tmp/8", "/tmp/9"]
+
+  both = gc._union(every_fifth, largest_three)
+  print(both(path_list))  # shows ["/tmp/0", "/tmp/5",
+                          #        "/tmp/7", "/tmp/8", "/tmp/9"]
+  # Delete everything not in 'both'.
+  to_delete = gc._negation(both)
+  for p in to_delete(path_list):
+    gfile.DeleteRecursively(p.path)  # deletes: "/tmp/1", "/tmp/2",
+                                     # "/tmp/3", "/tmp/4", "/tmp/6"
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import collections
+import heapq
+import math
+import os
+import tensorflow as tf
+from tensorflow.python.platform import gfile
+
+Path = collections.namedtuple('Path', 'path export_version')
+
+
+def _largest_export_versions(n):
+  """Creates a filter that keeps the largest n export versions.
+
+  Args:
+    n: number of versions to keep.
+
+  Returns:
+    A filter function that keeps the n largest paths.
+  """
+
+  def keep(paths):
+    heap = []
+    for idx, path in enumerate(paths):
+      if path.export_version is not None:
+        heapq.heappush(heap, (path.export_version, idx))
+    keepers = [paths[i] for _, i in heapq.nlargest(n, heap)]
+    return sorted(keepers)
+
+  return keep
+
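+# For illustration: given Paths with export_version 0..9,
+# _largest_export_versions(3) keeps the Paths for versions 7, 8 and 9,
+# returned in sorted order.
+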
+
+def _one_of_every_n_export_versions(n):
+  """Creates a filter that keeps one of every n export versions.
+
+  Args:
+    n: interval size.
+
+  Returns:
+    A filter function that keeps exactly one path from each interval
+    [0, n], (n, 2n], (2n, 3n], etc...  If more than one path exists in an
+    interval the largest is kept.
+  """
+
+  def keep(paths):
+    """A filter function that keeps exactly one out of every n paths."""
+
+    keeper_map = {}  # map from interval to largest path seen in that interval
+    for p in paths:
+      if p.export_version is None:
+        # Skip missing export_versions.
+        continue
+      # Find the interval (with a special case to map export_version = 0 to
+      # interval 0).
+      interval = math.floor(
+          (p.export_version - 1) / n) if p.export_version else 0
+      existing = keeper_map.get(interval, None)
+      if (not existing) or (existing.export_version < p.export_version):
+        keeper_map[interval] = p
+    return sorted(keeper_map.values())
+
+  return keep
+
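+# For illustration: with n=5 and export_versions 1..10, the kept versions
+# are 5 (largest in (0, 5]) and 10 (largest in (5, 10]).
+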
+
+def _mod_export_version(n):
+  """Creates a filter that keeps every export that is a multiple of n.
+
+  Args:
+    n: step size.
+
+  Returns:
+    A filter function that keeps paths where export_version % n == 0.
+  """
+
+  def keep(paths):
+    keepers = []
+    for p in paths:
+      if p.export_version % n == 0:
+        keepers.append(p)
+    return sorted(keepers)
+
+  return keep
+
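+# For illustration: with n=3 and export_versions 0..9,
+# _mod_export_version(3) keeps versions 0, 3, 6 and 9.
+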
+
+def _union(lf, rf):
+  """Creates a filter that keeps the union of two filters.
+
+  Args:
+    lf: first filter
+    rf: second filter
+
+  Returns:
+    A filter function that keeps the union of the paths kept by both filters.
+  """
+
+  def keep(paths):
+    l = set(lf(paths))
+    r = set(rf(paths))
+    return sorted(list(l | r))
+
+  return keep
+
+
+def _negation(f):
+  """Negate a filter.
+
+  Args:
+    f: filter function to invert
+
+  Returns:
+    A filter function that keeps the paths not kept by f.
+  """
+
+  def keep(paths):
+    l = set(paths)
+    r = set(f(paths))
+    return sorted(list(l - r))
+
+  return keep
+
+
+def _get_paths(base_dir, parser):
+  """Gets a list of Paths in a given directory.
+
+  Args:
+    base_dir: directory.
+    parser: a function which gets the raw Path and can augment it with
+      information such as the export_version, or ignore the path by returning
+      None.  An example parser may extract the export version from a path such
+      as "/tmp/exports/100", and another may extract it from a full file name
+      such as "/tmp/checkpoint-99.out".
+
+  Returns:
+    A list of Paths contained in the base directory with the parsing function
+    applied.
+    By default the following fields are populated,
+      - Path.path
+    The parsing function is responsible for populating,
+      - Path.export_version
+  """
+  # We are mocking this in the test, hence we should not use the public API.
+  raw_paths = gfile.ListDirectory(base_dir)
+  paths = []
+  for r in raw_paths:
+    # ListDirectory() returns paths with a trailing "/" if base_dir is a GCS URL.
+    r = tf.compat.as_str_any(r)
+    if r[-1] == '/':
+      r = r[:-1]
+    p = parser(Path(os.path.join(tf.compat.as_str_any(base_dir), r), None))
+    if p:
+      paths.append(p)
+  return sorted(paths)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/base_head.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/base_head.py
new file mode 100644
index 00000000..ca0f63fb
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/base_head.py
@@ -0,0 +1,934 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Abstractions for the base head class."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import abc
+
+import six
+import tensorflow as tf
+from tensorflow.python.feature_column import feature_column_lib
+from tensorflow.python.feature_column.feature_column import _LazyBuilder
+from tensorflow.python.feature_column.feature_column import _NumericColumn
+from tensorflow.python.framework import dtypes
+from tensorflow.python.framework import ops
+from tensorflow.python.keras.optimizer_v2 import optimizer_v2
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.ops import weights_broadcast_ops
+from tensorflow.python.util import function_utils
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.export import export_output
+
+DEFAULT_SERVING_KEY = tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY
+
+# The above default is defined by TF Serving, but these next three are just
+# a local convention without any special meaning.
+CLASSIFY_SERVING_KEY = 'classification'
+REGRESS_SERVING_KEY = 'regression'
+PREDICT_SERVING_KEY = 'predict'
+
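+# For illustration (assumed shape; concrete heads may differ), a
+# classification head typically populates `export_outputs` as:
+#
+#   {DEFAULT_SERVING_KEY: classification_output,
+#    CLASSIFY_SERVING_KEY: classification_output,
+#    PREDICT_SERVING_KEY: predict_output}
+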
+
+@estimator_export('estimator.Head')
+@six.add_metaclass(abc.ABCMeta)
+class Head(object):
+  """Interface for the head/top of a model.
+
+  Head sits on top of the model network and handles computing the outputs of
+  the network. Given logits (or output of a hidden layer), a Head knows how to
+  compute predictions, loss, train_op, metrics and export outputs. It is meant
+  to:
+
+  1. Simplify writing model_fn and make model_fn more configurable for
+     Estimator.
+  2. Simplify creating loss and metrics for the train and test loop in eager
+     execution.
+  3. Support a wide range of machine learning models. Since most heads can
+     work with logits, they can support DNN, RNN, Wide, Wide&Deep,
+     Global objectives, Gradient boosted trees and many other types
+     of machine learning models.
+
+  Common usage:
+  Here is a simplified model_fn to build a DNN regression model.
+    ```python
+    def _my_dnn_model_fn(features, labels, mode, params, config=None):
+      # Optionally your callers can pass head to model_fn as a param.
+      head = tf.estimator.RegressionHead(...)
+
+      feature_columns = tf.feature_column.numeric_column(...)
+      feature_layer = tf.keras.layers.DenseFeatures(feature_columns)
+      inputs = feature_layer(features)
+
+      # Compute logits with tf.keras.layers API
+      hidden_layer0 = tf.keras.layers.Dense(
+          units=1000, activation="relu")(inputs)
+      hidden_layer1 = tf.keras.layers.Dense(
+          units=500, activation="relu")(hidden_layer0)
+      logits = tf.keras.layers.Dense(
+          units=head.logits_dimension, activation=None)(hidden_layer1)
+
+      # Or use Keras model for logits computation
+      model = tf.keras.Sequential()
+      model.add(tf.keras.layers.Dense(units=1000, activation="relu"))
+      model.add(tf.keras.layers.Dense(units=500, activation="relu"))
+      model.add(tf.keras.layers.Dense(
+         units=head.logits_dimension, activation=None))
+      logits = model(inputs)
+
+      return head.create_estimator_spec(
+          features=features,
+          labels=labels,
+          mode=mode,
+          logits=logits,
+          optimizer=optimizer)
+    ```
+  """
+
+  @abc.abstractproperty
+  def name(self):
+    """The name of this head.
+
+    Returns:
+      A string.
+    """
+    raise NotImplementedError('Calling an abstract method.')
+
+  @abc.abstractproperty
+  def logits_dimension(self):
+    """Size of the last dimension of the logits `Tensor`.
+
+    Often this is the number of classes, labels, or real values to be predicted.
+    Typically, logits is of shape `[batch_size, logits_dimension]`.
+
+    Returns:
+      The expected size of the `logits` tensor.
+    """
+    raise NotImplementedError('Calling an abstract method.')
+
+  @abc.abstractproperty
+  def loss_reduction(self):
+    """One of `tf.losses.Reduction`.
+
+    Describes how to reduce training loss over the batch, such as mean or sum.
+
+    Returns:
+      The type of loss reduction used in the head.
+    """
+    raise NotImplementedError('Calling an abstract method.')
+
+  @abc.abstractmethod
+  def loss(self,
+           labels,
+           logits,
+           features=None,
+           mode=None,
+           regularization_losses=None):
+    """Returns a loss `Tensor` from provided arguments.
+
+    Note that the `features` and `mode` args are most likely not used, but
+    some Head implementations may require them.
+
+    Args:
+      labels: Labels `Tensor`, or `dict` mapping string label names to `Tensor`
+        objects of the label values.
+      logits: Logits `Tensor` to be used for loss construction.
+      features: Input `dict` mapping string feature names to `Tensor` or
+        `SparseTensor` objects containing the values for that feature in a
+        minibatch. Often used to fetch the example-weight tensor.
+      mode: Estimator's `ModeKeys`. To be used in case loss calculation is
+        different in Train and Eval mode.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses.
+
+    Returns:
+      A scalar `Tensor` representing regularized training loss used in train and
+      eval.
+    """
+    raise NotImplementedError('Calling an abstract method.')
+
+  @abc.abstractmethod
+  def predictions(self, logits, keys=None):
+    """Returns a `dict` of predictions from provided logits.
+
+    Args:
+      logits: Logits `Tensor` to be used for prediction construction.
+      keys: A list of `string` for prediction keys. Defaults to `None`, meaning
+        if not specified, predictions will be created for all the pre-defined
+        valid keys in the head.
+
+    Returns:
+      A `dict` of predicted `Tensor` keyed by prediction name.
+    """
+    raise NotImplementedError('Calling an abstract method.')
+
+  @abc.abstractmethod
+  def metrics(self, regularization_losses=None):
+    """Returns a `dict` of metric objects.
+
+    Args:
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses.
+
+    Returns:
+       A `dict` of metrics keyed by string name. The value is an instance of
+       `Metric` class.
+    """
+    raise NotImplementedError('Calling an abstract method.')
+
+  @abc.abstractmethod
+  def update_metrics(self,
+                     eval_metrics,
+                     features,
+                     logits,
+                     labels,
+                     mode=None,
+                     regularization_losses=None):
+    """Updates metric objects and returns a `dict` of the updated metrics.
+
+    Args:
+      eval_metrics: A `dict` of metrics to be updated.
+      features: Input `dict` mapping string feature names to `Tensor` or
+        `SparseTensor` objects containing the values for that feature in a
+        minibatch. Often used to fetch the example-weight tensor.
+      logits: logits `Tensor` to be used for metrics update.
+      labels: Labels `Tensor`, or `dict` mapping string label names to `Tensor`
+        objects of the label values.
+      mode: Estimator's `ModeKeys`. In most cases, this arg is not used and can
+        be removed in the method implementation. Note that the `mode` arg is
+        not used in `tf.estimator.*Head`; if the update of the metrics doesn't
+        rely on `mode`, it can be safely omitted from the method signature.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training and evaluation loss, such as regularization losses.
+
+    Returns:
+       A `dict` of updated metrics keyed by name. The value is an instance of
+       `Metric` class.
+    """
+    raise NotImplementedError('Calling an abstract method.')
+
+  def _summary_key(self, key):
+    return '{}/{}'.format(key, self.name) if self.name else key
+
+  def create_estimator_spec(self,
+                            features,
+                            mode,
+                            logits,
+                            labels=None,
+                            optimizer=None,
+                            trainable_variables=None,
+                            train_op_fn=None,
+                            update_ops=None,
+                            regularization_losses=None):
+    """Returns `EstimatorSpec` that a model_fn can return.
+
+    It is recommended to pass all args by name.
+
+    Args:
+      features: Input `dict` mapping string feature names to `Tensor` or
+        `SparseTensor` objects containing the values for that feature in a
+        minibatch. Often used to fetch the example-weight tensor.
+      mode: Estimator's `ModeKeys`.
+      logits: Logits `Tensor` to be used by the head.
+      labels: Labels `Tensor`, or `dict` mapping string label names to `Tensor`
+        objects of the label values.
+      optimizer: A `tf.keras.optimizers.Optimizer` instance to optimize the
+        loss in TRAIN mode. Namely, sets `train_op = optimizer.get_updates(loss,
+        trainable_variables)`, which updates variables to minimize `loss`.
+      trainable_variables: A list or tuple of `Variable` objects to update to
+        minimize `loss`. In TensorFlow 1.x, by default these are the list of
+        variables collected in the graph under the key
+        `GraphKeys.TRAINABLE_VARIABLES`. As TensorFlow 2.x doesn't have
+        collections and GraphKeys, trainable_variables need to be passed
+        explicitly here.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns an op
+        to optimize the model with the loss in TRAIN mode. Used if `optimizer`
+        is `None`. Exactly one of `train_op_fn` and `optimizer` must be set in
+        TRAIN mode. By default, it is `None` in other modes. If you want to
+        optimize loss yourself, you can pass `lambda _: tf.no_op()` and then
+        use `EstimatorSpec.loss` to compute and apply gradients.
+      update_ops: A list or tuple of update ops to be run at training time. For
+        example, layers such as BatchNormalization create mean and variance
+        update ops that need to be run at training time. In TensorFlow 1.x,
+        these are thrown into an UPDATE_OPS collection. As TensorFlow 2.x
+        doesn't have collections, update_ops need to be passed explicitly here.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses.
+
+    Returns:
+      `EstimatorSpec`.
+    """
+    # Not all subclasses of Head will have implemented
+    # _create_tpu_estimator_spec. If it is implemented, we can convert it to
+    # the normal `EstimatorSpec` by calling its `as_estimator_spec()` method.
+    try:
+      tpu_estimator_spec = (
+          self._create_tpu_estimator_spec(
+              features=features,
+              mode=mode,
+              logits=logits,
+              labels=labels,
+              optimizer=optimizer,
+              trainable_variables=trainable_variables,
+              train_op_fn=train_op_fn,
+              update_ops=update_ops,
+              regularization_losses=regularization_losses))
+      return tpu_estimator_spec.as_estimator_spec()
+    except NotImplementedError:
+      raise NotImplementedError(
+          'Subclasses of Head must implement `create_estimator_spec()` or '
+          '_create_tpu_estimator_spec().')
+
+  def _create_tpu_estimator_spec(
+      self,
+      features,
+      mode,
+      logits,
+      labels=None,
+      optimizer=None,
+      trainable_variables=None,
+      train_op_fn=None,
+      update_ops=None,
+      regularization_losses=None,
+  ):
+    """Returns `model_fn._TPUEstimatorSpec` that a model_fn can return.
+
+    Args:
+      features: Input `dict` mapping string feature names to `Tensor` or
+        `SparseTensor` objects containing the values for that feature in a
+        minibatch. Often used to fetch the example-weight tensor.
+      mode: Estimator's `ModeKeys`.
+      logits: Logits `Tensor` to be used by the head.
+      labels: Labels `Tensor`, or `dict` mapping string label names to `Tensor`
+        objects of the label values.
+      optimizer: A `tf.keras.optimizers.Optimizer` instance to optimize the
+        loss in TRAIN mode. Namely, sets `train_op = optimizer.get_updates(loss,
+        trainable_variables)`, which updates variables to minimize `loss`.
+      trainable_variables: A list or tuple of `Variable` objects to update to
+        minimize `loss`. In TensorFlow 1.x, by default these are the list of
+        variables collected in the graph under the key
+        `GraphKeys.TRAINABLE_VARIABLES`. As TensorFlow 2.x doesn't have
+        collections and GraphKeys, trainable_variables need to be passed
+        explicitly here.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns an op
+        to optimize the model with the loss in TRAIN mode. Used if `optimizer`
+        is `None`. Exactly one of `train_op_fn` and `optimizer` must be set in
+        TRAIN mode. By default, it is `None` in other modes. If you want to
+        optimize loss yourself, you can pass `lambda _: tf.no_op()` and then
+        use `EstimatorSpec.loss` to compute and apply gradients.
+      update_ops: A list or tuple of update ops to be run at training time. For
+        example, layers such as BatchNormalization create mean and variance
+        update ops that need to be run at training time. In TensorFlow 1.x,
+        these are thrown into an UPDATE_OPS collection. As TensorFlow 2.x
+        doesn't have collections, update_ops need to be passed explicitly here.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses.
+
+    Returns:
+      A `model_fn._TPUEstimatorSpec` instance.
+    """
+    raise NotImplementedError(
+        'TPUEstimatorSpec not available for this model head.')
+
+
+# TODO(b/119617064): unify eager and graph implementations
+# Note that tensor shape checking is slow in Eager mode. To mitigate this, the
+# static tensor shape is used for checking. The duplicated shape checking for
+# eager mode in the following helper functions can be safely removed if
+# there's some way to get around it in the future.
+
+# Label shape error messages.
+_LABEL_NONE_ERR_MSG = (
+    'You must provide a labels Tensor. Given: None. '
+    'Suggested troubleshooting steps: Check that your data contains your label '
+    'feature. Check that your input_fn properly parses and returns labels.')
+
+_SPARSE_LABEL_ERR_MSG = (
+    'SparseTensor labels are not supported. Labels must be a Tensor of shape '
+    '[D0, D1, ..., DN, {}], e.g. [batch_size, {}]. Suggested Fix (1): Check '
+    'the label feature in your data. Each example must contain {} value(s). '
+    'If not, your choice of label was probably incorrect. Suggested Fix (2): '
+    'In your input_fn, use tf.sparse_tensor_to_dense() to turn labels into a '
+    'Tensor.')
+
+_MISMATCHED_LABEL_DIM_ERR_MSG = (
+    'Mismatched label shape. Expected labels dimension={}. Received {}. '
+    'Suggested Fix: If your classifier expects one-hot encoding label, check '
+    'your n_classes argument to the estimator and/or the shape of your label. '
+    'Otherwise, check the shape of your label.')
+
+_LABEL_SHAPE_ERR_MSG = (
+    'labels shape must be [D0, D1, ... DN, {}]. Suggested Fix: check your '
+    'n_classes argument to the head and/or the shape of your label.')
+
+_VALIDATION_ERROR_MSG = '{} should be a list or a tuple. Given type: {}.'
+
+
+def check_dense_labels_match_logits_and_reshape(labels, logits,
+                                                expected_labels_dimension):
+  """Checks labels shape matches logits, and reshapes if needed.
+
+  Consider logits of shape [D0, D1, ... DN, logits_dimension]. Then labels
+  shape must be [D0, D1, ... DN, expected_labels_dimension].
+  If expected_labels_dimension=1, labels could be [D0, D1, ... DN] and this
+  method reshapes them to [D0, D1, ... DN, 1].
+
+  Args:
+    labels: labels Tensor.
+    logits: logits Tensor.
+    expected_labels_dimension: Integer.
+
+  Returns:
+    Validated and reshaped labels Tensor.
+
+  Raises:
+    ValueError: If labels is a SparseTensor.
+    ValueError: If labels shape is statically defined and fails validation.
+    OpError: If labels shape is not statically defined and fails validation.
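+
+  A minimal illustrative example, assuming eager execution:
+
+  >>> logits = tf.constant([[1.0], [2.0]])   # shape [2, 1]
+  >>> labels = tf.constant([0.0, 1.0])       # shape [2]
+  >>> check_dense_labels_match_logits_and_reshape(
+  ...     labels, logits, expected_labels_dimension=1).shape
+  TensorShape([2, 1])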
+  """
+  if labels is None:
+    raise ValueError(_LABEL_NONE_ERR_MSG)
+  with ops.name_scope('labels', values=(labels, logits)) as scope:
+    labels = tf.compat.v1.convert_to_tensor_or_sparse_tensor(labels)
+    if isinstance(labels, tf.sparse.SparseTensor):
+      raise ValueError(
+          _SPARSE_LABEL_ERR_MSG.format(expected_labels_dimension,
+                                       expected_labels_dimension,
+                                       expected_labels_dimension))
+    # Eager mode.
+    if tf.executing_eagerly():
+      labels_rank = labels._rank()  # pylint: disable=protected-access
+      logits_rank = logits._rank()  # pylint: disable=protected-access
+      if (labels_rank is not None and logits_rank is not None and
+          labels_rank == logits_rank - 1):
+        labels = tf.compat.v1.expand_dims(labels, -1)
+        labels_rank += 1
+      labels_shape = labels._shape_tuple()  # pylint: disable=protected-access
+      if labels_rank < 2:
+        raise ValueError('labels must have rank at least 2.  Received rank {}, '
+                         'shape {}'.format(labels_rank, labels_shape))
+      if labels_shape[-1] != expected_labels_dimension:
+        raise ValueError(
+            _MISMATCHED_LABEL_DIM_ERR_MSG.format(expected_labels_dimension,
+                                                 labels_shape[-1]))
+      logits_shape = logits._shape_tuple()  # pylint: disable=protected-access
+      expected_labels_shape = logits_shape[:-1] + (expected_labels_dimension,)
+      if expected_labels_shape != labels_shape:
+        raise ValueError(
+            '{}, expected_labels_shape: {}. labels_shape: {}.'.format(
+                _LABEL_SHAPE_ERR_MSG.format(expected_labels_dimension),
+                expected_labels_shape, labels_shape))
+      return labels
+
+    # Graph mode.
+    if (labels.shape.ndims is not None and logits.shape.ndims is not None and
+        labels.shape.ndims == logits.shape.ndims - 1):
+      labels = tf.compat.v1.expand_dims(labels, -1)
+    assert_rank = tf.compat.v1.debugging.assert_rank_at_least(
+        labels,
+        2,
+        message=_LABEL_SHAPE_ERR_MSG.format(expected_labels_dimension))
+    with tf.control_dependencies([assert_rank]):
+      static_shape = labels.shape
+      if static_shape.ndims is not None:
+        final_dim = static_shape[-1]
+        if (final_dim is not None) and (final_dim != expected_labels_dimension):
+          raise ValueError(
+              _MISMATCHED_LABEL_DIM_ERR_MSG.format(expected_labels_dimension,
+                                                   final_dim))
+      logits_shape = tf.compat.v1.shape(logits)
+      expected_labels_shape = tf.concat(
+          [logits_shape[:-1], [expected_labels_dimension]], axis=0)
+      labels_shape = tf.compat.v1.shape(labels)
+      assert_dimension = tf.compat.v1.debugging.assert_equal(
+          expected_labels_shape,
+          labels_shape,
+          message=_LABEL_SHAPE_ERR_MSG.format(expected_labels_dimension),
+          data=[
+              'expected_labels_shape: ', expected_labels_shape,
+              'labels_shape: ', labels_shape
+          ])
+      with tf.control_dependencies([assert_dimension]):
+        return tf.identity(labels, name=scope)
+
+
+def get_weights_and_check_match_logits(features,
+                                       weight_column,
+                                       logits,
+                                       allow_per_logit_weights=False):
+  """Fetches weights from features and checks that the shape matches logits.
+
+  Consider logits of shape [D0, D1, ... DN, logits_dimension]. Weights shape
+  can be either:
+  * [D0, D1, ... DN, logits_dimension] if `allow_per_logit_weights=True`.
+  * [D0, D1, ... DN, 1]
+  * [D0, D1, ... DN]: In this case, weights is reshaped into
+    [D0, D1, ... DN, 1] to work with weight broadcasting rules.
+
+  Args:
+    features: The features dict that contains weights.
+    weight_column: The weight column. If not given, this method returns 1.
+    logits: logits Tensor.
+    allow_per_logit_weights: Boolean. Whether we allow weights along the logits
+      dimension, namely shape `[D0, D1, ... DN, logits_dimension]`.
+
+  Returns:
+    Validated and reshaped weights Tensor.
+
+  Raises:
+    ValueError: If the weights `Tensor` cannot be cast into float.
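+
+  A minimal illustrative sketch, assuming eager execution and a hypothetical
+  weight feature named 'w':
+
+  >>> features = {'w': tf.constant([[0.5], [2.0]])}
+  >>> logits = tf.constant([[1.0], [2.0]])
+  >>> get_weights_and_check_match_logits(features, 'w', logits).shape
+  TensorShape([2, 1])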
+  """
+  if allow_per_logit_weights:
+    err_msg = ('weights shape must be [D0, D1, ... DN], [D0, D1, ... DN, 1] or '
+               '[D0, D1, ... DN, logits_dimension]')
+  else:
+    err_msg = ('weights shape must be [D0, D1, ... DN] or [D0, D1, ... DN, 1]')
+  with ops.name_scope(
+      'weights', values=tuple(six.itervalues(features)) + (logits,)) as scope:
+    # Fetch the weights.
+    if weight_column is None:
+      return 1.
+    # TODO(b/117839674): update feature_column
+    if isinstance(weight_column, six.string_types):
+      weight_column = tf.feature_column.numeric_column(
+          key=weight_column, shape=(1,))
+    if not isinstance(weight_column,
+                      (feature_column_lib.NumericColumn, _NumericColumn)):
+      raise TypeError('Weight column must be either a string or NumericColumn.'
+                      ' Given type: {}.'.format(type(weight_column)))
+    weights = weight_column._get_dense_tensor(  # pylint: disable=protected-access
+        _LazyBuilder(features))
+    if not (weights.dtype.is_floating or weights.dtype.is_integer):
+      raise ValueError('Weight column should be castable to float. '
+                       'Given dtype: {}'.format(weights.dtype))
+    weights = tf.cast(weights, name='weights', dtype=tf.dtypes.float32)
+    # Validate the weights shape.
+    # Eager mode.
+    if tf.executing_eagerly():
+      weights_shape = weights._shape_tuple()  # pylint: disable=protected-access
+      logits_shape = logits._shape_tuple()  # pylint: disable=protected-access
+      weights_rank = weights._rank()  # pylint: disable=protected-access
+      logits_rank = logits._rank()  # pylint: disable=protected-access
+      if (weights_rank is not None and logits_rank is not None and
+          weights_rank == logits_rank - 1):
+        if logits_shape[:-1] != weights_shape:
+          raise ValueError('{}, logits_shape: {}. weights_shape: {}.'.format(
+              err_msg, logits_shape, weights_shape))
+        return tf.compat.v1.expand_dims(weights, -1, name=scope)
+      supported_weights_shape = logits_shape[:-1] + (1,)
+      if allow_per_logit_weights:
+        if (logits_shape != weights_shape and
+            supported_weights_shape != weights_shape):
+          raise ValueError('{}, logits_shape: {}. weights_shape: {}.'.format(
+              err_msg, logits_shape, weights_shape))
+      else:
+        if supported_weights_shape != weights_shape:
+          raise ValueError('{}, logits_shape: {}. weights_shape: {}.'.format(
+              err_msg, logits_shape, weights_shape))
+      return weights
+
+    # Graph mode.
+    weights_shape = tf.compat.v1.shape(weights, name='weights_shape')
+    logits_shape = tf.compat.v1.shape(logits, name='logits_shape')
+    if (weights.shape.ndims is not None and logits.shape.ndims is not None and
+        weights.shape.ndims == logits.shape.ndims - 1):
+      assert_dimension = tf.compat.v1.debugging.assert_equal(
+          logits_shape[:-1],
+          weights_shape,
+          message=err_msg,
+          data=[
+              'logits_shape: ', logits_shape, 'weights_shape: ', weights_shape
+          ])
+      with tf.control_dependencies([assert_dimension]):
+        return tf.compat.v1.expand_dims(weights, -1, name=scope)
+    supported_weights_shape = tf.concat([logits_shape[:-1], [1]], axis=0)
+    if allow_per_logit_weights:
+      condition = tf.math.reduce_any([
+          tf.reduce_all(tf.math.equal(logits_shape, weights_shape)),
+          tf.reduce_all(tf.math.equal(supported_weights_shape, weights_shape))
+      ])
+      assert_dimension = tf.debugging.Assert(
+          condition=condition,
+          data=[
+              err_msg, 'logits_shape: ', logits_shape, 'weights_shape: ',
+              weights_shape
+          ])
+    else:
+      assert_dimension = tf.compat.v1.debugging.assert_equal(
+          supported_weights_shape,
+          weights_shape,
+          message=err_msg,
+          data=[
+              'logits_shape: ', logits_shape, 'weights_shape: ', weights_shape
+          ])
+    with tf.control_dependencies([assert_dimension]):
+      return tf.identity(weights, name=scope)
+
+
+def check_logits_final_dim(logits, expected_logits_dimension):
+  """Checks that logits shape is [D0, D1, ... DN, logits_dimension]."""
+  with ops.name_scope('logits', values=(logits,)) as scope:
+    logits = tf.cast(logits, tf.dtypes.float32)
+    # Eager mode
+    if tf.executing_eagerly():
+      logits_shape = logits._shape_tuple()  # pylint: disable=protected-access
+      logits_rank = logits._rank()  # pylint: disable=protected-access
+      if logits_rank < 2:
+        raise ValueError('logits must have rank at least 2.  Received rank {}, '
+                         'shape {}'.format(logits_rank, logits_shape))
+      if (isinstance(expected_logits_dimension, int) and
+          logits_shape[-1] != expected_logits_dimension):
+        raise ValueError(
+            'logits shape must be [D0, D1, ... DN, logits_dimension], '
+            'got {}.'.format(logits_shape))
+      return logits
+    # Graph mode
+    logits_shape = tf.compat.v1.shape(logits)
+    assert_rank = tf.compat.v1.debugging.assert_rank_at_least(
+        logits,
+        2,
+        data=[logits_shape],
+        message='logits shape must be [D0, D1, ... DN, logits_dimension]')
+    with tf.control_dependencies([assert_rank]):
+      static_shape = logits.shape
+      if static_shape.ndims is not None and static_shape[-1] is not None:
+        if (isinstance(expected_logits_dimension, int) and
+            static_shape[-1] != expected_logits_dimension):
+          raise ValueError(
+              'logits shape must be [D0, D1, ... DN, logits_dimension], '
+              'got {}.'.format(static_shape))
+        return logits
+      assert_dimension = tf.compat.v1.debugging.assert_equal(
+          expected_logits_dimension,
+          logits_shape[-1],
+          data=[logits_shape],
+          message='logits shape must be [D0, D1, ... DN, logits_dimension]')
+      with tf.control_dependencies([assert_dimension]):
+        return tf.identity(logits, name=scope)
+
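+# Illustrative sketch, assuming eager execution: `check_logits_final_dim` is a
+# validating pass-through. For example:
+#   check_logits_final_dim(tf.constant([[1.0, 2.0]]), 2)  # returns the logits
+#   check_logits_final_dim(tf.constant([[1.0, 2.0]]), 3)  # raises ValueError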
+
+def validate_loss_fn_args(loss_fn):
+  """Validates loss_fn arguments.
+
+  Required arguments: labels, logits.
+  Optional arguments: features, loss_reduction.
+
+  Args:
+    loss_fn: The loss function.
+
+  Raises:
+    ValueError: If the signature is unexpected.
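+
+  A signature sketch that passes validation (illustrative only):
+
+  >>> def my_loss_fn(labels, logits, features=None):
+  ...   return tf.compat.v1.nn.sigmoid_cross_entropy_with_logits(
+  ...       labels=labels, logits=logits)
+  >>> validate_loss_fn_args(my_loss_fn)  # No exception is raised.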
+  """
+  loss_fn_args = function_utils.fn_args(loss_fn)
+  for required_arg in ['labels', 'logits']:
+    if required_arg not in loss_fn_args:
+      raise ValueError('loss_fn must contain argument: {}. '
+                       'Given arguments: {}'.format(required_arg, loss_fn_args))
+  invalid_args = list(
+      set(loss_fn_args) -
+      set(['labels', 'logits', 'features', 'loss_reduction']))
+  if invalid_args:
+    raise ValueError('loss_fn has unexpected args: {}'.format(invalid_args))
+
+
+def validate_loss_reduction(loss_reduction):
+  if (loss_reduction not in losses_utils.ReductionV2.all() or
+      loss_reduction == losses_utils.ReductionV2.NONE):
+    raise ValueError(
+        'Invalid loss_reduction: {}. See `tf.losses.Reduction` for valid '
+        'options.'.format(loss_reduction))
+
+
+def validate_update_ops(update_ops=None):
+  if update_ops is not None and not isinstance(update_ops, (list, tuple)):
+    raise ValueError(
+        _VALIDATION_ERROR_MSG.format('update_ops', type(update_ops)))
+
+
+def validate_v2_optimizer(optimizer):
+  if not isinstance(optimizer, optimizer_v2.OptimizerV2):
+    raise ValueError(
+        'The given optimizer is not a tf.keras.optimizers.Optimizer instance. '
+        'Given: {}'.format(optimizer))
+
+
+def validate_trainable_variables(trainable_variables=None):
+  if trainable_variables is None:
+    raise ValueError('trainable_variables cannot be None. Given {}'.format(
+        trainable_variables))
+  if not isinstance(trainable_variables, (list, tuple)):
+    raise ValueError(
+        _VALIDATION_ERROR_MSG.format('trainable_variables',
+                                     type(trainable_variables)))
+
+
+def validate_n_classes(n_classes):
+  """Validates n_classes argument.
+
+  Required arguments: n_classes.
+
+  Args:
+    n_classes: The number of classes.
+
+  Raises:
+    ValueError: If n_classes is <= 2 and n_classes is a Python integer.
+  Returns:
+    n_classes in its original type.
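+
+  A minimal illustrative example:
+
+  >>> validate_n_classes(3)
+  3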
+  """
+  if isinstance(n_classes, int) and (n_classes <= 2):
+    raise ValueError('n_classes must be > 2: %s.' % n_classes)
+
+  n_classes_as_tensor = ops.convert_to_tensor(n_classes)
+  assert_n_classes = tf.compat.v1.debugging.assert_greater(
+      n_classes_as_tensor, 2, message='n_classes must be greater than 2')
+  with tf.control_dependencies([assert_n_classes]):
+    tf.no_op()
+  # Return n_classes in its original type, so that any code
+  # using the accessor logits_dimension() has the original type.
+  return n_classes
+
+
+def call_loss_fn(loss_fn, labels, logits, features, expected_loss_dim=1):
+  """Calls loss_fn and checks the returned shape.
+
+  For shape checking, eager uses the static dimension to improve performance.
+
+  Args:
+    loss_fn: The loss function.
+    labels: Processed labels Tensor.
+    logits: Logits Tensor of shape [D0, D1, ... DN, logits_dimension].
+    features: Features dict.
+    expected_loss_dim: The expected last dimension of loss Tensor.
+
+  Returns:
+    Loss Tensor with shape [D0, D1, ... DN, expected_loss_dim].
+
+  Raises:
+    ValueError: If the loss tensor shape is unexpected.
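+
+  A minimal illustrative sketch, assuming eager execution and a toy `loss_fn`:
+
+  >>> labels = tf.constant([[0.0], [1.0]])
+  >>> logits = tf.constant([[0.5], [0.5]])
+  >>> loss_fn = lambda labels, logits: tf.math.abs(labels - logits)
+  >>> call_loss_fn(loss_fn, labels, logits, features={}).shape
+  TensorShape([2, 1])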
+  """
+  loss_fn_args = function_utils.fn_args(loss_fn)
+  kwargs = {}
+  if 'features' in loss_fn_args:
+    kwargs['features'] = features
+  with ops.name_scope(
+      'call_loss_fn', values=[labels, logits] + list(six.itervalues(features))):
+    unweighted_loss = loss_fn(labels=labels, logits=logits, **kwargs)
+    # Eager mode.
+    if tf.executing_eagerly():
+      loss_shape = unweighted_loss._shape_tuple()  # pylint: disable=protected-access
+      logits_shape = logits._shape_tuple()  # pylint: disable=protected-access
+      expected_loss_shape = logits_shape[:-1] + (expected_loss_dim,)
+      if loss_shape != expected_loss_shape:
+        raise ValueError(
+            'loss_fn must return Tensor of shape '
+            '[D0, D1, ... DN, {}]. '.format(expected_loss_dim),
+            'logits_shape: ', logits_shape, 'loss_shape: ', loss_shape)
+      return unweighted_loss
+    # Graph mode.
+    logits_shape = tf.compat.v1.shape(logits, name='logits_shape')
+    expected_loss_shape = tf.concat([logits_shape[:-1], [expected_loss_dim]],
+                                    axis=0,
+                                    name='expected_loss_shape')
+    loss_shape = tf.compat.v1.shape(unweighted_loss, name='loss_shape')
+    check_loss_shape_op = tf.debugging.Assert(
+        tf.reduce_all(tf.math.equal(loss_shape, expected_loss_shape)),
+        data=[
+            'loss_fn must return Tensor of shape '
+            '[D0, D1, ... DN, {}]. '.format(expected_loss_dim),
+            'logits_shape: ', logits_shape, 'loss_shape: ', loss_shape
+        ],
+        name='check_loss_shape')
+    with tf.control_dependencies([check_loss_shape_op]):
+      return tf.identity(unweighted_loss)
+
+
+def check_prediction_keys(pred_keys, valid_keys):
+  for key in pred_keys:
+    if key not in valid_keys:
+      raise ValueError('Prediction key must be in PredictionKeys, given: {}. '
+                       'Valid prediction keys include {}.'.format(
+                           key, valid_keys))
+
+
+def all_class_ids(logits, n_classes):
+  batch_size = tf.compat.v1.shape(logits)[0]
+  class_id_list = tf.range(n_classes)
+  return tf.tile(
+      input=tf.compat.v1.expand_dims(input=class_id_list, axis=0),
+      multiples=[batch_size, 1])
+
+
+def all_classes(logits, n_classes, label_vocabulary=None):
+  batch_size = tf.compat.v1.shape(logits)[0]
+  if label_vocabulary:
+    classes_list = label_vocabulary
+  else:
+    classes_list = tf.strings.as_string(tf.range(n_classes))
+  return tf.tile(
+      input=tf.compat.v1.expand_dims(input=classes_list, axis=0),
+      multiples=[batch_size, 1])
+
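+# Illustrative sketch, assuming eager execution: both helpers above tile
+# per-class values across the batch dimension of `logits`. For example:
+#   all_class_ids(tf.zeros([2, 3]), n_classes=3)
+#   # -> [[0, 1, 2], [0, 1, 2]]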
+
+def classification_output(scores, n_classes, label_vocabulary=None):
+  return export_output.ClassificationOutput(
+      scores=scores,
+      # `ClassificationOutput` requires string classes.
+      classes=all_classes(scores, n_classes, label_vocabulary))
+
+
+def check_label_range(labels, n_classes, message=None):
+  """Check if labels are in the range of [0, n_classes)."""
+  with ops.name_scope('check_label_range', values=(labels,)):
+    # Eager mode
+    if tf.executing_eagerly():
+      assert_less = tf.reduce_all(tf.math.less_equal(labels, n_classes - 1))
+      if not assert_less:
+        raise ValueError(message or
+                         'Labels must be <= {} - 1'.format(n_classes))
+      assert_greater = tf.reduce_all(tf.math.greater_equal(labels, 0))
+      if not assert_greater:
+        raise ValueError(message or 'Labels must be >= 0')
+      return labels
+    # Graph mode
+    assert_less = tf.compat.v1.debugging.assert_less_equal(
+        labels,
+        ops.convert_to_tensor(n_classes - 1, dtype=labels.dtype),
+        message=message or 'Labels must be <= n_classes - 1')
+    assert_greater = tf.compat.v1.debugging.assert_non_negative(
+        labels, message=message or 'Labels must be >= 0')
+    with tf.control_dependencies((assert_less, assert_greater)):
+      return tf.identity(labels)
+
+
+def update_metric_with_broadcast_weights(eval_metric, values, weights):
+  values = tf.cast(values, dtype=tf.dtypes.float32)
+  if weights is not None:
+    weights = weights_broadcast_ops.broadcast_weights(weights, values)
+  eval_metric.update_state(values=values, sample_weight=weights)
+
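+# Illustrative sketch, assuming eager execution: updating a Mean metric with
+# broadcast weights.
+#   m = tf.keras.metrics.Mean()
+#   update_metric_with_broadcast_weights(
+#       m, values=tf.constant([[1.0], [3.0]]), weights=tf.constant(1.0))
+#   m.result()  # -> 2.0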
+
+def create_eval_metrics_tuple(fn, kwargs):
+  """Creates TPU eval metrics tuple.
+
+  Helper function to make eval_metric tuple (eval_metric_fn, fn_kwargs) used
+  by `TPUEstimator`. TPUEstimator requires that `eval_metric_fn` take
+  exclusively Tensor arguments. This helper creates such a function from a
+  more generic function that can take both Tensor and non-Tensor arguments.
+
+  Args:
+    fn: An eval_metric_fn that takes both Tensor and non-Tensor arguments. This
+      function must return a dict of form
+        {'metric name': (metric_tensor, eval_op)}
+    kwargs: Dict of arguments for `fn`.
+
+  Returns:
+    `eval_metric` tuple that can be passed to a `model_fn._TPUEstimatorSpec`.
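+
+  A minimal illustrative sketch (`my_metric_fn` and its args are hypothetical):
+
+  >>> def my_metric_fn(labels, top_k):
+  ...   return {'my_metric': (tf.constant(0.0), tf.no_op())}
+  >>> fn, tensor_kwargs = create_eval_metrics_tuple(
+  ...     my_metric_fn, {'labels': tf.constant([1]), 'top_k': 5})
+  >>> sorted(tensor_kwargs)  # Only Tensor args remain; `top_k` is bound in fn.
+  ['labels']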
+  """
+  tensor_kwargs = {}
+  nontensor_kwargs = {}
+  for k, v in six.iteritems(kwargs):
+    if tf.is_tensor(v):
+      tensor_kwargs[k] = v
+    else:
+      nontensor_kwargs[k] = v
+
+  def _fn(**tensors):
+    return fn(**dict(nontensor_kwargs, **tensors))
+
+  return (_fn, tensor_kwargs)
+
+
+def create_estimator_spec_train_op(
+    head_name,
+    optimizer=None,
+    trainable_variables=None,
+    train_op_fn=None,
+    update_ops=None,
+    regularized_training_loss=None,
+    loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE):
+  """Create train_op for estimator_spec.
+
+  Args:
+    head_name: The name of the head.
+    optimizer: A `tf.keras.optimizers.Optimizer` instance to optimize the loss
+      in TRAIN mode. Namely, sets `train_op = optimizer.get_updates(loss,
+      trainable_variables)`, which updates variables to minimize `loss`.
+    trainable_variables: A list or tuple of `Variable` objects to update to
+      minimize `loss`. In TensorFlow 1.x, by default these are the list of
+      variables collected in the graph under the key
+      `GraphKeys.TRAINABLE_VARIABLES`. As TensorFlow 2.x doesn't have
+      collections and GraphKeys, trainable_variables need to be passed
+      explicitly here.
+    train_op_fn: Function that takes a scalar loss `Tensor` and returns
+      `train_op`. Used if `optimizer` is `None`.
+    update_ops: A list or tuple of update ops to be run at training time. For
+      example, layers such as BatchNormalization create mean and variance update
+      ops that need to be run at training time. In TensorFlow 1.x, these are
+      thrown into an UPDATE_OPS collection. As TensorFlow 2.x doesn't have
+      collections, update_ops need to be passed explicitly here.
+    regularized_training_loss: A scalar for the total training loss that
+      includes all regularization losses. If you're not using the optimizer to
+      generate the train op, make sure to scale the loss correctly before
+      passing it in; the loss typically needs to be scaled down by the number
+      of workers.
+    loss_reduction: One of `tf.keras.losses.Reduction` except `NONE`. Describes
+      how to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
+
+  Returns:
+    A train op for EstimatorSpec.
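+
+  A minimal illustrative sketch using the `train_op_fn` path (eager mode):
+
+  >>> train_op = create_estimator_spec_train_op(
+  ...     head_name='my_head',
+  ...     train_op_fn=lambda loss: tf.no_op(),
+  ...     regularized_training_loss=tf.constant(1.0))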
+  """
+  del head_name
+  validate_update_ops(update_ops)
+  with ops.name_scope(''):  # Reset all previous name_scope.
+    # Add training as the name_scope to be compatible with Keras.
+    with ops.name_scope('training'):
+      if optimizer is not None:
+        if train_op_fn is not None:
+          raise ValueError('train_op_fn and optimizer cannot both be set.')
+        validate_v2_optimizer(optimizer)
+        validate_trainable_variables(trainable_variables)
+        # Scale loss by number of replicas.
+        if loss_reduction == losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE:
+          regularized_training_loss = losses_utils.scale_loss_for_distribution(
+              regularized_training_loss)
+        train_op = optimizer.get_updates(regularized_training_loss,
+                                         trainable_variables)[0]
+      elif train_op_fn is not None:
+        train_op = train_op_fn(regularized_training_loss)
+      else:
+        raise ValueError('train_op_fn and optimizer cannot both be None.')
+      if update_ops is not None:
+        train_op = tf.group(train_op, *update_ops)
+      return train_op
+
+
+def create_estimator_spec_summary(regularized_training_loss,
+                                  regularization_losses=None,
+                                  summary_key_fn=None):
+  """Create summary for estimator_spec."""
+  with ops.name_scope(''):
+    keys = metric_keys.MetricKeys
+    loss_key = summary_key_fn(keys.LOSS) if summary_key_fn else keys.LOSS
+    tf.compat.v1.summary.scalar(loss_key, regularized_training_loss)
+    if regularization_losses is not None:
+      regularization_loss = tf.math.add_n(regularization_losses)
+      regularization_loss_key = (
+          summary_key_fn(keys.LOSS_REGULARIZATION)
+          if summary_key_fn else keys.LOSS_REGULARIZATION)
+      tf.compat.v1.summary.scalar(regularization_loss_key, regularization_loss)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/binary_class_head.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/binary_class_head.py
new file mode 100644
index 00000000..e1a8a135
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/binary_class_head.py
@@ -0,0 +1,601 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Binary class head."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import tensorflow as tf
+from tensorflow.python.framework import ops
+from tensorflow.python.keras import metrics
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.ops import lookup_ops
+from tensorflow.python.ops import weights_broadcast_ops
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import model_fn
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.canned import prediction_keys
+from tensorflow_estimator.python.estimator.export import export_output
+from tensorflow_estimator.python.estimator.head import base_head
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+
+@estimator_export('estimator.BinaryClassHead')
+class BinaryClassHead(base_head.Head):
+  """Creates a `Head` for single label binary classification.
+
+  Uses `sigmoid_cross_entropy_with_logits` loss.
+
+  The head expects `logits` with shape `[D0, D1, ... DN, 1]`.
+  In many applications, the shape is `[batch_size, 1]`.
+
+  `labels` must be a dense `Tensor` with shape matching `logits`, namely
+  `[D0, D1, ... DN, 1]`. If `label_vocabulary` is given, `labels` must be a
+  string `Tensor` with values from the vocabulary. If `label_vocabulary` is
+  not given, `labels` must be a float `Tensor` with values in the interval
+  `[0, 1]`.
+
+  If `weight_column` is specified, weights must be of shape
+  `[D0, D1, ... DN]`, or `[D0, D1, ... DN, 1]`.
+
+  The loss is the weighted sum over the input dimensions. Namely, if the input
+  labels have shape `[batch_size, 1]`, the loss is the weighted sum over
+  `batch_size`.
+
+  Also supports custom `loss_fn`. `loss_fn` takes `(labels, logits)` or
+  `(labels, logits, features, loss_reduction)` as arguments and returns loss
+  with shape `[D0, D1, ... DN, 1]`. `loss_fn` must support float `labels` with
+  shape `[D0, D1, ... DN, 1]`, because the head applies `label_vocabulary` to
+  the input labels before passing them to `loss_fn`.
+
+  Usage:
+
+  >>> head = tf.estimator.BinaryClassHead()
+  >>> logits = np.array(((45,), (-41,),), dtype=np.float32)
+  >>> labels = np.array(((1,), (1,),), dtype=np.int32)
+  >>> features = {'x': np.array(((42,),), dtype=np.float32)}
+  >>> # expected_loss = sum(cross_entropy(labels, logits)) / batch_size
+  >>> #               = sum(0, 41) / 2 = 41 / 2 = 20.50
+  >>> loss = head.loss(labels, logits, features=features)
+  >>> print('{:.2f}'.format(loss.numpy()))
+  20.50
+  >>> eval_metrics = head.metrics()
+  >>> updated_metrics = head.update_metrics(
+  ...   eval_metrics, features, logits, labels)
+  >>> for k in sorted(updated_metrics):
+  ...  print('{} : {:.2f}'.format(k, updated_metrics[k].result().numpy()))
+    accuracy : 0.50
+    accuracy_baseline : 1.00
+    auc : 0.00
+    auc_precision_recall : 1.00
+    average_loss : 20.50
+    label/mean : 1.00
+    precision : 1.00
+    prediction/mean : 0.50
+    recall : 0.50
+  >>> preds = head.predictions(logits)
+  >>> print(preds['logits'])
+  tf.Tensor(
+    [[ 45.]
+     [-41.]], shape=(2, 1), dtype=float32)
+
+  Usage with a canned estimator:
+
+  ```python
+  my_head = tf.estimator.BinaryClassHead()
+  my_estimator = tf.estimator.DNNEstimator(
+      head=my_head,
+      hidden_units=...,
+      feature_columns=...)
+  ```
+
+  It can also be used with a custom `model_fn`. Example:
+
+  ```python
+  def _my_model_fn(features, labels, mode):
+    my_head = tf.estimator.BinaryClassHead()
+    logits = tf.keras.Model(...)(features)
+
+    return my_head.create_estimator_spec(
+        features=features,
+        mode=mode,
+        labels=labels,
+        optimizer=tf.keras.optimizers.Adagrad(lr=0.1),
+        logits=logits)
+
+  my_estimator = tf.estimator.Estimator(model_fn=_my_model_fn)
+  ```
+
+  Args:
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It is used to down weight or boost examples during training. It
+      will be multiplied by the loss of the example.
+    thresholds: Iterable of floats in the range `(0, 1)`. For binary
+      classification metrics such as precision and recall, an eval metric is
+      generated for each threshold value. This threshold is applied to the
+      logistic values to determine the binary classification (i.e., above the
+      threshold is `true`, below is `false`).
+    label_vocabulary: A list or tuple of strings representing possible label
+      values. If it is not given, that means labels are already encoded within
+      [0, 1]. If given, labels must be string type and have any value in
+      `label_vocabulary`. Note that errors will be raised if `label_vocabulary`
+      is not provided but labels are strings.
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Decides how to
+      reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`, namely
+      weighted sum of losses divided by `batch size * label_dimension`.
+    loss_fn: Optional loss function.
+    name: Name of the head. If provided, summary and metrics keys will be
+      suffixed by `"/" + name`. Also used as `name_scope` when creating ops.
+  """
+
+  def __init__(self,
+               weight_column=None,
+               thresholds=None,
+               label_vocabulary=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               loss_fn=None,
+               name=None):
+    if label_vocabulary is not None and not isinstance(label_vocabulary,
+                                                       (list, tuple)):
+      raise ValueError(
+          'label_vocabulary should be a list or a tuple. Given type: {}'.format(
+              type(label_vocabulary)))
+    thresholds = tuple(thresholds) if thresholds else tuple()
+    for threshold in thresholds:
+      if (threshold <= 0.0) or (threshold >= 1.0):
+        raise ValueError(
+            'thresholds must be in (0, 1): {}.'.format(thresholds))
+    base_head.validate_loss_reduction(loss_reduction)
+    if loss_fn:
+      base_head.validate_loss_fn_args(loss_fn)
+    self._weight_column = weight_column
+    self._thresholds = thresholds
+    self._label_vocabulary = label_vocabulary
+    self._loss_reduction = loss_reduction
+    self._loss_fn = loss_fn
+    self._name = name
+    # Metric keys.
+    keys = metric_keys.MetricKeys
+    self._loss_mean_key = self._summary_key(keys.LOSS_MEAN)
+    self._accuracy_key = self._summary_key(keys.ACCURACY)
+    self._precision_key = self._summary_key(keys.PRECISION)
+    self._recall_key = self._summary_key(keys.RECALL)
+    self._prediction_mean_key = self._summary_key(keys.PREDICTION_MEAN)
+    self._label_mean_key = self._summary_key(keys.LABEL_MEAN)
+    self._accuracy_baseline_key = self._summary_key(keys.ACCURACY_BASELINE)
+    self._auc_key = self._summary_key(keys.AUC)
+    self._auc_pr_key = self._summary_key(keys.AUC_PR)
+    self._loss_regularization_key = self._summary_key(keys.LOSS_REGULARIZATION)
+    accuracy_keys = []
+    precision_keys = []
+    recall_keys = []
+    for threshold in self._thresholds:
+      accuracy_keys.append(
+          self._summary_key(keys.ACCURACY_AT_THRESHOLD % threshold))
+      precision_keys.append(
+          self._summary_key(keys.PRECISION_AT_THRESHOLD % threshold))
+      recall_keys.append(
+          self._summary_key(keys.RECALL_AT_THRESHOLD % threshold))
+    self._accuracy_keys = tuple(accuracy_keys)
+    self._precision_keys = tuple(precision_keys)
+    self._recall_keys = tuple(recall_keys)
+
+  @property
+  def name(self):
+    """See `base_head.Head` for details."""
+    return self._name
+
+  @property
+  def logits_dimension(self):
+    """See `base_head.Head` for details."""
+    return 1
+
+  @property
+  def loss_reduction(self):
+    """See `base_head.Head` for details."""
+    return self._loss_reduction
+
+  # Attributes for lookup tables in Eager execution. Note that for Graph
+  # execution, the lookup tables are created on demand to make sure the lookup
+  # table is in the same graph as its input tensors for `train` and `eval` of
+  # Estimator (as Estimator recreates graphs for `train`, `eval` and
+  # `predict`).
+  _cached_class_id_table = None
+  _cached_class_string_table = None
+
+  @property
+  def _class_id_table(self):
+    """Creates a lookup table for class_id.
+
+    In eager execution, this lookup table will be lazily created on the first
+    call of `self._class_id_table`, and cached for later use; in graph
+    execution, it will be created on demand.
+
+    Returns:
+      A hash table for lookup.
+    """
+    if self._cached_class_id_table is None or not tf.executing_eagerly():
+      self._cached_class_id_table = lookup_ops.index_table_from_tensor(
+          vocabulary_list=tuple(self._label_vocabulary), name='class_id_lookup')
+    return self._cached_class_id_table
+
+  @property
+  def _class_string_table(self):
+    """Creates a lookup table for class_string.
+
+    In eager execution, this lookup table will be lazily created on the first
+    call of `self._class_string_table` and cached for later use; in graph
+    execution, it will be created on demand.
+
+    Returns:
+      A hash table for lookup.
+    """
+    if (self._cached_class_string_table is None or not tf.executing_eagerly()):
+      self._cached_class_string_table = (
+          lookup_ops.index_to_string_table_from_tensor(
+              vocabulary_list=self._label_vocabulary,
+              name='class_string_lookup'))
+    return self._cached_class_string_table
+
+  def _processed_labels(self, logits, labels):
+    """Converts labels to integer id space."""
+    labels = base_head.check_dense_labels_match_logits_and_reshape(
+        labels=labels, logits=logits, expected_labels_dimension=1)
+    if self._label_vocabulary is not None:
+      labels = self._class_id_table.lookup(labels)
+    labels = tf.cast(labels, dtype=tf.dtypes.float32)
+    return base_head.check_label_range(labels, n_classes=2)
+
+  def _unweighted_loss_and_weights(self, logits, labels, features):
+    """Computes unweighted loss and weights."""
+    if self._loss_fn:
+      unweighted_loss = base_head.call_loss_fn(
+          loss_fn=self._loss_fn,
+          labels=labels,
+          logits=logits,
+          features=features,
+          expected_loss_dim=1)
+    else:
+      unweighted_loss = tf.compat.v1.nn.sigmoid_cross_entropy_with_logits(
+          labels=labels, logits=logits)
+    weights = base_head.get_weights_and_check_match_logits(
+        features=features, weight_column=self._weight_column, logits=logits)
+    return unweighted_loss, weights
+
+  def loss(self,
+           labels,
+           logits,
+           features=None,
+           mode=None,
+           regularization_losses=None):
+    """Returns regularized training loss. See `base_head.Head` for details."""
+    del mode  # Unused for this head.
+    with ops.name_scope(
+        'losses', values=(logits, labels, regularization_losses, features)):
+      logits = base_head.check_logits_final_dim(logits, self.logits_dimension)
+      labels = self._processed_labels(logits, labels)
+      unweighted_loss, weights = self._unweighted_loss_and_weights(
+          logits, labels, features)
+      training_loss = losses_utils.compute_weighted_loss(
+          unweighted_loss,
+          sample_weight=weights,
+          reduction=self._loss_reduction)
+      regularization_loss = tf.math.add_n(
+          regularization_losses) if regularization_losses is not None else None
+      regularized_training_loss = (
+          training_loss + regularization_loss
+          if regularization_loss is not None else training_loss)
+    return regularized_training_loss
+
+  def predictions(self, logits, keys=None):
+    """Return predictions based on keys.
+
+    See `base_head.Head` for details.
+
+    Args:
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, logits_dimension]`.
+        For many applications, the shape is `[batch_size, logits_dimension]`.
+      keys: a list or tuple of prediction keys. Each key can be either the
+        class variable of prediction_keys.PredictionKeys or its string value,
+        such as prediction_keys.PredictionKeys.CLASSES or 'classes'. If not
+        specified, it will return the predictions for all valid keys.
+
+    Returns:
+      A dict of predictions.
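+
+    A minimal illustrative example, assuming eager execution:
+
+    >>> head = tf.estimator.BinaryClassHead()
+    >>> logits = tf.constant([[2.0], [-1.0]])
+    >>> preds = head.predictions(logits, keys=['logistic'])
+    >>> preds['logistic'].numpy().round(2)
+    array([[0.88],
+           [0.27]], dtype=float32)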
+    """
+    pred_keys = prediction_keys.PredictionKeys
+    valid_keys = [
+        pred_keys.LOGITS, pred_keys.LOGISTIC, pred_keys.PROBABILITIES,
+        pred_keys.CLASS_IDS, pred_keys.CLASSES, pred_keys.ALL_CLASS_IDS,
+        pred_keys.ALL_CLASSES
+    ]
+
+    if keys:
+      base_head.check_prediction_keys(keys, valid_keys)
+    else:
+      keys = valid_keys
+    logits = base_head.check_logits_final_dim(logits, self.logits_dimension)
+    predictions = {}
+    with ops.name_scope('predictions', values=(logits,)):
+      if pred_keys.LOGITS in keys:
+        predictions[pred_keys.LOGITS] = logits
+      if pred_keys.LOGISTIC in keys:
+        logistic = tf.math.sigmoid(logits, name=pred_keys.LOGISTIC)
+        predictions[pred_keys.LOGISTIC] = logistic
+      two_class_logits = tf.concat((tf.compat.v1.zeros_like(logits), logits),
+                                   axis=-1,
+                                   name='two_class_logits')
+      if pred_keys.PROBABILITIES in keys:
+        probabilities = tf.compat.v1.nn.softmax(
+            two_class_logits, name=pred_keys.PROBABILITIES)
+        predictions[pred_keys.PROBABILITIES] = probabilities
+      if pred_keys.CLASS_IDS in keys or pred_keys.CLASSES in keys:
+        class_ids = tf.compat.v1.math.argmax(
+            two_class_logits, axis=-1, name=pred_keys.CLASS_IDS)
+        class_ids = tf.compat.v1.expand_dims(class_ids, axis=-1)
+        if pred_keys.CLASS_IDS in keys:
+          predictions[pred_keys.CLASS_IDS] = class_ids
+        if pred_keys.CLASSES in keys:
+          if self._label_vocabulary is not None:
+            classes = self._class_string_table.lookup(class_ids)
+          else:
+            classes = tf.strings.as_string(class_ids, name='str_classes')
+          predictions[pred_keys.CLASSES] = classes
+      if pred_keys.ALL_CLASS_IDS in keys:
+        predictions[pred_keys.ALL_CLASS_IDS] = base_head.all_class_ids(
+            logits, n_classes=2)
+      if pred_keys.ALL_CLASSES in keys:
+        predictions[pred_keys.ALL_CLASSES] = base_head.all_classes(
+            logits, n_classes=2, label_vocabulary=self._label_vocabulary)
+      return predictions
+
+  def metrics(self, regularization_losses=None):
+    """Creates metrics. See `base_head.Head` for details."""
+    keys = metric_keys.MetricKeys
+    with ops.name_scope('metrics', values=(regularization_losses,)):
+      # Mean metric.
+      eval_metrics = {}
+      eval_metrics[self._loss_mean_key] = metrics.Mean(name=keys.LOSS_MEAN)
+      eval_metrics[self._accuracy_key] = metrics.Accuracy(name=keys.ACCURACY)
+      eval_metrics[self._precision_key] = metrics.Precision(name=keys.PRECISION)
+      eval_metrics[self._recall_key] = metrics.Recall(name=keys.RECALL)
+      eval_metrics[self._prediction_mean_key] = metrics.Mean(
+          name=keys.PREDICTION_MEAN)
+      eval_metrics[self._label_mean_key] = metrics.Mean(name=keys.LABEL_MEAN)
+      eval_metrics[self._accuracy_baseline_key] = (
+          metrics.Mean(name=keys.ACCURACY_BASELINE))
+      # The default summation_method is "interpolation" in the AUC metric.
+      eval_metrics[self._auc_key] = metrics.AUC(name=keys.AUC)
+      eval_metrics[self._auc_pr_key] = metrics.AUC(curve='PR', name=keys.AUC_PR)
+      if regularization_losses is not None:
+        eval_metrics[self._loss_regularization_key] = metrics.Mean(
+            name=keys.LOSS_REGULARIZATION)
+      for i, threshold in enumerate(self._thresholds):
+        eval_metrics[self._accuracy_keys[i]] = metrics.BinaryAccuracy(
+            name=self._accuracy_keys[i], threshold=threshold)
+        eval_metrics[self._precision_keys[i]] = metrics.Precision(
+            name=self._precision_keys[i], thresholds=threshold)
+        eval_metrics[self._recall_keys[i]] = metrics.Recall(
+            name=self._recall_keys[i], thresholds=threshold)
+    return eval_metrics
+
+  def _update_accuracy_baseline(self, eval_metrics):
+    """Update accuracy baseline metric based on labels mean metric.
+
+    This is the best the model could do by always predicting one class.
+
+    For example, suppose the labels = [0, 1, 0, 1, 1]. So the
+    label_mean.total = 3, label_mean.count = 5, and
+    label_mean = label_mean.total / label_mean.count = 3 / 5 = 0.6
+    By always predicting one class, there are two cases:
+    (1) predicted_labels_0 = [0, 0, 0, 0, 0], accuracy_0 = 2 / 5 = 0.4
+    (2) predicted_labels_1 = [1, 1, 1, 1, 1], accuracy_1 = 3 / 5 = 0.6
+    So the accuracy_baseline = max(accuracy_0, accuracy_1) = 0.6,
+                             = max(label_mean, 1 - label_mean)
+
+    To update the total and count of accuracy_baseline,
+    accuracy_baseline = max(label_mean, 1 - label_mean)
+                      = max(label_mean.total / label_mean.count,
+                            1 - label_mean.total / label_mean.count)
+                      = max(label_mean.total / label_mean.count,
+                      (label_mean.count - label_mean.total) / label_mean.count)
+    So accuracy_baseline.total = max(label_mean.total,
+                                    (label_mean.count - label_mean.total))
+    accuracy_baseline.count = label_mean.count
+
+    Args:
+      eval_metrics: A `dict` of metrics to be updated.
+    """
+    label_mean_metric = eval_metrics[self._label_mean_key]
+    accuracy_baseline_metric = eval_metrics[self._accuracy_baseline_key]
+    accuracy_baseline_metric.add_update(tf.no_op())
+    accuracy_baseline_metric.total = tf.math.maximum(
+        label_mean_metric.total,
+        label_mean_metric.count - label_mean_metric.total)
+    accuracy_baseline_metric.count = label_mean_metric.count
+
+  def _update_auc(self, auc_metric, labels, predictions, weights=None):
+    predictions = tf.cast(predictions, dtype=tf.dtypes.float32)
+    if weights is not None:
+      weights = weights_broadcast_ops.broadcast_weights(weights, predictions)
+    auc_metric.update_state(
+        y_true=labels, y_pred=predictions, sample_weight=weights)
+
+  def update_metrics(self,
+                     eval_metrics,
+                     features,
+                     logits,
+                     labels,
+                     regularization_losses=None):
+    """Updates eval metrics. See `base_head.Head` for details."""
+    preds = self.predictions(logits)
+    class_ids = preds[prediction_keys.PredictionKeys.CLASS_IDS]
+    logits = base_head.check_logits_final_dim(logits, self.logits_dimension)
+    labels = self._processed_labels(logits, labels)
+    unweighted_loss, weights = self._unweighted_loss_and_weights(
+        logits, labels, features)
+    # Update metrics.
+    eval_metrics[self._loss_mean_key].update_state(
+        values=unweighted_loss, sample_weight=weights)
+    eval_metrics[self._accuracy_key].update_state(
+        y_true=labels, y_pred=class_ids, sample_weight=weights)
+    eval_metrics[self._precision_key].update_state(
+        y_true=labels, y_pred=class_ids, sample_weight=weights)
+    eval_metrics[self._recall_key].update_state(
+        y_true=labels, y_pred=class_ids, sample_weight=weights)
+    logistic_key = prediction_keys.PredictionKeys.LOGISTIC
+    predictions = self.predictions(logits, [logistic_key])
+    logistic = predictions[logistic_key]
+    base_head.update_metric_with_broadcast_weights(
+        eval_metrics[self._prediction_mean_key], logistic, weights)
+    base_head.update_metric_with_broadcast_weights(
+        eval_metrics[self._label_mean_key], labels, weights)
+    self._update_accuracy_baseline(eval_metrics)
+    self._update_auc(
+        auc_metric=eval_metrics[self._auc_key],
+        labels=labels,
+        predictions=logistic,
+        weights=weights)
+    self._update_auc(
+        auc_metric=eval_metrics[self._auc_pr_key],
+        labels=labels,
+        predictions=logistic,
+        weights=weights)
+    if regularization_losses is not None:
+      regularization_loss = tf.math.add_n(regularization_losses)
+      eval_metrics[self._loss_regularization_key].update_state(
+          values=regularization_loss)
+    for i in range(len(self._thresholds)):
+      eval_metrics[self._accuracy_keys[i]].update_state(
+          y_true=labels, y_pred=logistic, sample_weight=weights)
+      eval_metrics[self._precision_keys[i]].update_state(
+          y_true=labels, y_pred=logistic, sample_weight=weights)
+      eval_metrics[self._recall_keys[i]].update_state(
+          y_true=labels, y_pred=logistic, sample_weight=weights)
+    return eval_metrics
+
+  def _create_tpu_estimator_spec(self,
+                                 features,
+                                 mode,
+                                 logits,
+                                 labels=None,
+                                 optimizer=None,
+                                 trainable_variables=None,
+                                 train_op_fn=None,
+                                 update_ops=None,
+                                 regularization_losses=None):
+    """Returns an `EstimatorSpec`.
+
+    Args:
+      features: Input `dict` mapping string feature names to `Tensor` or
+        `SparseTensor` objects containing the values for that feature in a
+        minibatch. Often used to fetch the example-weight tensor.
+      mode: Estimator's `ModeKeys`.
+      logits: Logits `Tensor` with shape `[D0, D1, ... DN, 1]`. For many
+        applications, the shape is `[batch_size, 1]`.
+      labels: Labels integer or string `Tensor` with shape matching `logits`,
+        namely `[D0, D1, ... DN, 1]` or `[D0, D1, ... DN]`. `labels` is a
+        required argument when `mode` equals `TRAIN` or `EVAL`.
+      optimizer: A `tf.keras.optimizers.Optimizer` instance to optimize the
+        loss in TRAIN mode. Namely, sets `train_op = optimizer.get_updates(loss,
+        trainable_variables)`, which updates variables to minimize `loss`.
+      trainable_variables: A list or tuple of `Variable` objects to update to
+        minimize `loss`. In Tensorflow 1.x, by default these are the list of
+        variables collected in the graph under the key
+        `GraphKeys.TRAINABLE_VARIABLES`. As Tensorflow 2.x doesn't have
+        collections and GraphKeys, trainable_variables need to be passed
+        explicitly here.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns
+        `train_op`. Used if `optimizer` is `None`.
+      update_ops: A list or tuple of update ops to be run at training time. For
+        example, layers such as BatchNormalization create mean and variance
+        update ops that need to be run at training time. In Tensorflow 1.x,
+        these are thrown into an UPDATE_OPS collection. As Tensorflow 2.x
+        doesn't have collections, update_ops need to be passed explicitly here.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses. These losses are
+        usually expressed as a batch average, so for best results users need to
+        set `loss_reduction=SUM_OVER_BATCH_SIZE` when creating the head to avoid
+        scaling errors.
+
+    Returns:
+      `EstimatorSpec`.
+
+    Raises:
+      ValueError: If both `train_op_fn` and `optimizer` are `None` in TRAIN
+        mode, or if both are set.
+    """
+    with ops.name_scope(self._name, 'head'):
+      # Predict.
+      pred_keys = prediction_keys.PredictionKeys
+      predictions = self.predictions(logits)
+      if mode == ModeKeys.PREDICT:
+        probabilities = predictions[pred_keys.PROBABILITIES]
+        logistic = predictions[pred_keys.LOGISTIC]
+        classifier_output = base_head.classification_output(
+            scores=probabilities,
+            n_classes=2,
+            label_vocabulary=self._label_vocabulary)
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.PREDICT,
+            predictions=predictions,
+            export_outputs={
+                base_head.DEFAULT_SERVING_KEY: classifier_output,
+                base_head.CLASSIFY_SERVING_KEY: classifier_output,
+                base_head.REGRESS_SERVING_KEY:
+                    export_output.RegressionOutput(value=logistic),
+                base_head.PREDICT_SERVING_KEY:
+                    export_output.PredictOutput(predictions)
+            })
+      regularized_training_loss = self.loss(
+          logits=logits,
+          labels=labels,
+          features=features,
+          mode=mode,
+          regularization_losses=regularization_losses)
+      # Eval.
+      if mode == ModeKeys.EVAL:
+        eval_metrics = self.metrics(regularization_losses=regularization_losses)
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.EVAL,
+            predictions=predictions,
+            loss=regularized_training_loss,
+            eval_metrics=base_head.create_eval_metrics_tuple(
+                self.update_metrics, {
+                    'eval_metrics': eval_metrics,
+                    'features': features,
+                    'logits': logits,
+                    'labels': labels,
+                    'regularization_losses': regularization_losses
+                }))
+      # Train.
+      train_op = base_head.create_estimator_spec_train_op(
+          head_name=self._name,
+          optimizer=optimizer,
+          train_op_fn=train_op_fn,
+          update_ops=update_ops,
+          trainable_variables=trainable_variables,
+          regularized_training_loss=regularized_training_loss,
+          loss_reduction=self._loss_reduction)
+    # Create summary.
+    base_head.create_estimator_spec_summary(
+        regularized_training_loss=regularized_training_loss,
+        regularization_losses=regularization_losses,
+        summary_key_fn=self._summary_key)
+    return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+        mode=ModeKeys.TRAIN,
+        predictions=predictions,
+        loss=regularized_training_loss,
+        train_op=train_op)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/head_utils.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/head_utils.py
new file mode 100644
index 00000000..1c55674d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/head_utils.py
@@ -0,0 +1,102 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Utilities for heads and unit tests."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import tensorflow as tf
+
+from tensorflow.core.framework import summary_pb2
+from tensorflow_estimator.python.estimator.head import binary_class_head
+from tensorflow_estimator.python.estimator.head import multi_class_head
+
+_DEFAULT_SERVING_KEY = tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY
+
+
+def binary_or_multi_class_head(n_classes, weight_column, label_vocabulary,
+                               loss_reduction):
+  """Creates either binary or multi-class head.
+
+  Args:
+    n_classes: Number of label classes.
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It is used to down weight or boost examples during training. It
+      will be multiplied by the loss of the example. If it is a string, it is
+      used as a key to fetch the weight tensor from the `features`. If it is a
+      `NumericColumn`, the raw tensor is fetched by key `weight_column.key`,
+      then `weight_column.normalizer_fn` is applied to it to get the weight
+      tensor.
+    label_vocabulary: A list of strings representing possible label values. If
+      given, labels must be of string type and take values in
+      `label_vocabulary`. If it is not given, labels are assumed to be already
+      encoded as integer or float within [0, 1] for `n_classes=2`, and as
+      integer values in {0, 1, ..., n_classes-1} for `n_classes` > 2. Errors
+      will be raised if a vocabulary is not provided and labels are strings.
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Defines how to
+      reduce the training loss over the batch. Defaults to
+      `SUM_OVER_BATCH_SIZE`.
+
+  Returns:
+    A `Head` instance.
+  """
+  if n_classes == 2:
+    head = binary_class_head.BinaryClassHead(
+        weight_column=weight_column,
+        label_vocabulary=label_vocabulary,
+        loss_reduction=loss_reduction)
+  else:
+    head = multi_class_head.MultiClassHead(
+        n_classes,
+        weight_column=weight_column,
+        label_vocabulary=label_vocabulary,
+        loss_reduction=loss_reduction)
+  return head
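+
+
+# A minimal usage sketch of `binary_or_multi_class_head` (hypothetical
+# arguments; assumes `tf.keras.losses.Reduction` from TF 2.x):
+#
+#   head = binary_or_multi_class_head(
+#       n_classes=2, weight_column=None, label_vocabulary=None,
+#       loss_reduction=tf.keras.losses.Reduction.SUM_OVER_BATCH_SIZE)
+#   assert isinstance(head, binary_class_head.BinaryClassHead)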
+
+
+def _initialize_variables(test_case, scaffold):
+  scaffold.finalize()
+  test_case.assertIsNone(scaffold.init_feed_dict)
+  test_case.assertIsNone(scaffold.init_fn)
+  scaffold.init_op.run()
+  scaffold.ready_for_local_init_op.eval()
+  scaffold.local_init_op.run()
+  scaffold.ready_op.eval()
+  test_case.assertIsNotNone(scaffold.saver)
+
+
+def _assert_simple_summaries(test_case,
+                             expected_summaries,
+                             summary_str,
+                             tol=1e-6):
+  """Assert summary the specified simple values.
+
+  Args:
+    test_case: test case.
+    expected_summaries: Dict of expected tags and simple values.
+    summary_str: Serialized `summary_pb2.Summary`.
+    tol: Tolerance used for both relative and absolute comparisons.
+  """
+  summary = summary_pb2.Summary()
+  summary.ParseFromString(summary_str)
+  test_case.assertAllClose(
+      expected_summaries, {v.tag: v.simple_value for v in summary.value},
+      rtol=tol,
+      atol=tol)
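+
+
+# A sketch of constructing the serialized `summary_str` consumed above
+# (hypothetical tag and value):
+#
+#   summary = summary_pb2.Summary()
+#   summary.value.add(tag='loss', simple_value=0.5)
+#   _assert_simple_summaries(test_case, {'loss': 0.5},
+#                            summary.SerializeToString())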
+
+
+def _assert_no_hooks(test_case, spec):
+  test_case.assertAllEqual([], spec.training_chief_hooks)
+  test_case.assertAllEqual([], spec.training_hooks)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_class_head.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_class_head.py
new file mode 100644
index 00000000..cbf5a2b3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_class_head.py
@@ -0,0 +1,496 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Multi class head."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import tensorflow as tf
+from tensorflow.python.framework import ops
+from tensorflow.python.keras import metrics
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.ops import lookup_ops
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import model_fn
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.canned import prediction_keys
+from tensorflow_estimator.python.estimator.export import export_output
+from tensorflow_estimator.python.estimator.head import base_head
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+
+@estimator_export('estimator.MultiClassHead')
+class MultiClassHead(base_head.Head):
+  """Creates a `Head` for multi class classification.
+
+  Uses `sparse_softmax_cross_entropy` loss.
+
+  The head expects `logits` with shape `[D0, D1, ... DN, n_classes]`.
+  In many applications, the shape is `[batch_size, n_classes]`.
+
+  `labels` must be a dense `Tensor` with shape matching `logits`, namely
+  `[D0, D1, ... DN, 1]`. If `label_vocabulary` is given, `labels` must be a
+  string `Tensor` with values from the vocabulary. If `label_vocabulary` is not
+  given, `labels` must be an integer `Tensor` with values specifying the class
+  index.
+
+  If `weight_column` is specified, weights must be of shape
+  `[D0, D1, ... DN]`, or `[D0, D1, ... DN, 1]`.
+
+  The loss is the weighted sum over the input dimensions. Namely, if the input
+  labels have shape `[batch_size, 1]`, the loss is the weighted sum over
+  `batch_size`.
+
+  Also supports custom `loss_fn`. `loss_fn` takes `(labels, logits)` or
+  `(labels, logits, features, loss_reduction)` as arguments and returns
+  unreduced loss with shape `[D0, D1, ... DN, 1]`. `loss_fn` must support
+  integer `labels` with shape `[D0, D1, ... DN, 1]`. Namely, the head applies
+  `label_vocabulary` to the input labels before passing them to `loss_fn`.
+
+  Usage:
+
+  >>> n_classes = 3
+  >>> head = tf.estimator.MultiClassHead(n_classes)
+  >>> logits = np.array(((10, 0, 0), (0, 10, 0),), dtype=np.float32)
+  >>> labels = np.array(((1,), (1,)), dtype=np.int64)
+  >>> features = {'x': np.array(((42,),), dtype=np.int32)}
+  >>> # expected_loss = sum(cross_entropy(labels, logits)) / batch_size
+  >>> #               = sum(10, 0) / 2 = 5.
+  >>> loss = head.loss(labels, logits, features=features)
+  >>> print('{:.2f}'.format(loss.numpy()))
+  5.00
+  >>> eval_metrics = head.metrics()
+  >>> updated_metrics = head.update_metrics(
+  ...   eval_metrics, features, logits, labels)
+  >>> for k in sorted(updated_metrics):
+  ...   print('{} : {:.2f}'.format(k, updated_metrics[k].result().numpy()))
+  accuracy : 0.50
+  average_loss : 5.00
+  >>> preds = head.predictions(logits)
+  >>> print(preds['logits'])
+  tf.Tensor(
+    [[10.  0.  0.]
+     [ 0. 10.  0.]], shape=(2, 3), dtype=float32)
+
+  Usage with a canned estimator:
+
+  ```python
+  my_head = tf.estimator.MultiClassHead(n_classes=3)
+  my_estimator = tf.estimator.DNNEstimator(
+      head=my_head,
+      hidden_units=...,
+      feature_columns=...)
+  ```
+
+  It can also be used with a custom `model_fn`. Example:
+
+  ```python
+  def _my_model_fn(features, labels, mode):
+    my_head = tf.estimator.MultiClassHead(n_classes=3)
+    logits = tf.keras.Model(...)(features)
+
+    return my_head.create_estimator_spec(
+        features=features,
+        mode=mode,
+        labels=labels,
+        optimizer=tf.keras.optimizers.Adagrad(lr=0.1),
+        logits=logits)
+
+  my_estimator = tf.estimator.Estimator(model_fn=_my_model_fn)
+  ```
+
+  Args:
+    n_classes: Number of classes, must be greater than 2 (for 2 classes, use
+      `BinaryClassHead`).
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It is used to down weight or boost examples during training. It
+      will be multiplied by the loss of the example.
+    label_vocabulary: A list or tuple of strings representing possible label
+      values. If it is not given, labels are assumed to be already encoded as
+      an integer within [0, n_classes). If given, labels must be of string type
+      and take values in `label_vocabulary`. Note that errors will be raised if
+      `label_vocabulary` is not provided but labels are strings. If both
+      `n_classes` and `label_vocabulary` are provided, `label_vocabulary` should
+      contain exactly `n_classes` items.
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Decides how to
+      reduce the training loss over the batch. Defaults to
+      `SUM_OVER_BATCH_SIZE`, namely the weighted sum of losses divided by
+      `batch size * label_dimension`.
+    loss_fn: Optional loss function.
+    name: Name of the head. If provided, summary and metrics keys will be
+      suffixed by `"/" + name`. Also used as `name_scope` when creating ops.
+  """
+
+  def __init__(self,
+               n_classes,
+               weight_column=None,
+               label_vocabulary=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               loss_fn=None,
+               name=None):
+    if n_classes is None:
+      raise ValueError('n_classes cannot be None')
+    if label_vocabulary is not None and not isinstance(label_vocabulary,
+                                                       (list, tuple)):
+      raise ValueError(
+          'label_vocabulary should be a list or a tuple. Given type: {}'.format(
+              type(label_vocabulary)))
+    if label_vocabulary is not None and len(label_vocabulary) != n_classes:
+      raise ValueError(
+          '"label_vocabulary" does not have "n_classes" items. '
+          'len(label_vocabulary)={}, n_classes={}, label_vocabulary={}'.format(
+              len(label_vocabulary), n_classes, label_vocabulary))
+    base_head.validate_loss_reduction(loss_reduction)
+    if loss_fn:
+      base_head.validate_loss_fn_args(loss_fn)
+    self._n_classes = base_head.validate_n_classes(n_classes)
+    self._weight_column = weight_column
+    self._label_vocabulary = label_vocabulary
+    self._loss_reduction = loss_reduction
+    self._loss_fn = loss_fn
+    self._name = name
+    # Metric keys.
+    keys = metric_keys.MetricKeys
+    self._loss_mean_key = self._summary_key(keys.LOSS_MEAN)
+    self._accuracy_key = self._summary_key(keys.ACCURACY)
+    self._loss_regularization_key = self._summary_key(keys.LOSS_REGULARIZATION)
+
+  @property
+  def name(self):
+    """See `base_head.Head` for details."""
+    return self._name
+
+  @property
+  def logits_dimension(self):
+    """See `base_head.Head` for details."""
+    return self._n_classes
+
+  @property
+  def loss_reduction(self):
+    """See `base_head.Head` for details."""
+    return self._loss_reduction
+
+  # Attributes for lookup tables in Eager execution. Note that for Graph
+  # execution, the lookup tables are created on demand to make sure each
+  # lookup table is in the same graph as its input tensors for `train` and
+  # `eval` of Estimator (as Estimator recreates graphs for `train`, `eval` and
+  # `predict`).
+  _cached_class_id_table = None
+  _cached_class_string_table = None
+
+  @property
+  def _class_id_table(self):
+    """Creates a lookup table for class_id.
+
+    In eager execution, this lookup table will be lazily created on the first
+    call of `self._class_id_table` and cached for later use; in graph
+    execution, it will be created on demand.
+
+    Returns:
+      A hash table for lookup.
+    """
+    if self._cached_class_id_table is None or not tf.executing_eagerly():
+      self._cached_class_id_table = lookup_ops.index_table_from_tensor(
+          vocabulary_list=tuple(self._label_vocabulary), name='class_id_lookup')
+    return self._cached_class_id_table
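+
+  # For illustration, the lookup above behaves like the following sketch
+  # (hypothetical vocabulary, eager execution assumed):
+  #
+  #   table = lookup_ops.index_table_from_tensor(
+  #       vocabulary_list=('cat', 'dog', 'bird'), name='class_id_lookup')
+  #   table.lookup(tf.constant([['dog'], ['bird']])).numpy()  # [[1], [2]]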
+
+  @property
+  def _class_string_table(self):
+    """Creates a lookup table for class_string.
+
+    In eager execution, this lookup table will be lazily created on the first
+    call of `self._class_string_table` and cached for later use; in graph
+    execution, it will be created on demand.
+
+    Returns:
+      A hash table for lookup.
+    """
+    if (self._cached_class_string_table is None or not tf.executing_eagerly()):
+      self._cached_class_string_table = (
+          lookup_ops.index_to_string_table_from_tensor(
+              vocabulary_list=self._label_vocabulary,
+              name='class_string_lookup'))
+    return self._cached_class_string_table
+
+  def _processed_labels(self, logits, labels):
+    """Converts labels to integer id space."""
+    labels = base_head.check_dense_labels_match_logits_and_reshape(
+        labels=labels, logits=logits, expected_labels_dimension=1)
+    if self._label_vocabulary is None:
+      if not labels.dtype.is_integer:
+        raise ValueError(
+            'Labels dtype should be integer. Instead got {}.'.format(
+                labels.dtype))
+      label_ids = labels
+    else:
+      if labels.dtype != tf.dtypes.string:
+        raise ValueError('Labels dtype should be string if there is a '
+                         'vocabulary. Instead got {}'.format(labels.dtype))
+      label_ids = self._class_id_table.lookup(labels)
+    return base_head.check_label_range(label_ids, self._n_classes)
+
+  def _unweighted_loss_and_weights(self, logits, label_ids, features):
+    """Computes loss spec."""
+    if self._loss_fn:
+      unweighted_loss = base_head.call_loss_fn(
+          loss_fn=self._loss_fn,
+          labels=label_ids,
+          logits=logits,
+          features=features,
+          expected_loss_dim=1)
+    else:
+      unweighted_loss = tf.compat.v1.losses.sparse_softmax_cross_entropy(
+          labels=label_ids,
+          logits=logits,
+          reduction=tf.compat.v1.losses.Reduction.NONE)
+      # Restore the squeezed dim, so unweighted_loss matches the weights shape.
+      unweighted_loss = tf.compat.v1.expand_dims(unweighted_loss, axis=-1)
+    weights = base_head.get_weights_and_check_match_logits(
+        features=features, weight_column=self._weight_column, logits=logits)
+    return unweighted_loss, weights
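+
+  # The default branch above computes the same per-example values as
+  # tf.nn.sparse_softmax_cross_entropy_with_logits. A sketch with hypothetical
+  # inputs (eager execution assumed):
+  #
+  #   logits = tf.constant([[10., 0., 0.], [0., 10., 0.]])
+  #   label_ids = tf.constant([1, 1])
+  #   per_example = tf.nn.sparse_softmax_cross_entropy_with_logits(
+  #       labels=label_ids, logits=logits)  # ~[10., 0.]
+  #   # tf.expand_dims(per_example, axis=-1) restores shape [2, 1].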
+
+  def loss(self,
+           labels,
+           logits,
+           features=None,
+           mode=None,
+           regularization_losses=None):
+    """Returns regularized training loss. See `base_head.Head` for details."""
+    del mode  # Unused for this head.
+    with ops.name_scope(
+        'losses', values=(logits, labels, regularization_losses, features)):
+      logits = base_head.check_logits_final_dim(logits, self.logits_dimension)
+      label_ids = self._processed_labels(logits, labels)
+      unweighted_loss, weights = self._unweighted_loss_and_weights(
+          logits, label_ids, features)
+      training_loss = losses_utils.compute_weighted_loss(
+          unweighted_loss,
+          sample_weight=weights,
+          reduction=self._loss_reduction)
+      regularization_loss = tf.math.add_n(
+          regularization_losses) if regularization_losses is not None else None
+      regularized_training_loss = (
+          training_loss + regularization_loss
+          if regularization_loss is not None else training_loss)
+    return regularized_training_loss
+
+  def predictions(self, logits, keys=None):
+    """Return predictions based on keys.
+
+    See `base_head.Head` for details.
+
+    Args:
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, logits_dimension]`.
+        For many applications, the shape is `[batch_size, logits_dimension]`.
+      keys: A list or tuple of prediction keys. Each key can be either an
+        attribute of `prediction_keys.PredictionKeys` or its string value, such
+        as `prediction_keys.PredictionKeys.CLASSES` or `'classes'`. If not
+        specified, predictions for all valid keys are returned.
+
+    Returns:
+      A dict of predictions.
+    """
+    pred_keys = prediction_keys.PredictionKeys
+    valid_keys = [
+        pred_keys.LOGITS, pred_keys.PROBABILITIES, pred_keys.CLASS_IDS,
+        pred_keys.CLASSES, pred_keys.ALL_CLASS_IDS, pred_keys.ALL_CLASSES
+    ]
+    if keys:
+      base_head.check_prediction_keys(keys, valid_keys)
+    else:
+      keys = valid_keys
+    logits = base_head.check_logits_final_dim(logits, self.logits_dimension)
+    predictions = {}
+    with ops.name_scope('predictions', values=(logits,)):
+      if pred_keys.LOGITS in keys:
+        predictions[pred_keys.LOGITS] = logits
+      if pred_keys.PROBABILITIES in keys:
+        probabilities = tf.compat.v1.nn.softmax(
+            logits, name=pred_keys.PROBABILITIES)
+        predictions[pred_keys.PROBABILITIES] = probabilities
+      if pred_keys.CLASS_IDS in keys or pred_keys.CLASSES in keys:
+        # class_ids's shape is [D0, D1, ... DN].
+        class_ids = tf.compat.v1.math.argmax(
+            logits, axis=-1, name=pred_keys.CLASS_IDS)
+        # Expand to [D0, D1, ... DN, 1].
+        class_ids = tf.compat.v1.expand_dims(class_ids, axis=-1)
+        if pred_keys.CLASS_IDS in keys:
+          predictions[pred_keys.CLASS_IDS] = class_ids
+        if pred_keys.CLASSES in keys:
+          if self._label_vocabulary:
+            classes = self._class_string_table.lookup(class_ids)
+          else:
+            classes = tf.strings.as_string(class_ids, name='str_classes')
+          predictions[pred_keys.CLASSES] = classes
+      if pred_keys.ALL_CLASS_IDS in keys:
+        predictions[pred_keys.ALL_CLASS_IDS] = base_head.all_class_ids(
+            logits, n_classes=self._n_classes)
+      if pred_keys.ALL_CLASSES in keys:
+        predictions[pred_keys.ALL_CLASSES] = base_head.all_classes(
+            logits,
+            n_classes=self._n_classes,
+            label_vocabulary=self._label_vocabulary)
+      return predictions
+
+  def metrics(self, regularization_losses=None):
+    """Creates metrics. See `base_head.Head` for details."""
+    keys = metric_keys.MetricKeys
+    with ops.name_scope('metrics', values=(regularization_losses,)):
+      # Mean metric.
+      eval_metrics = {}
+      eval_metrics[self._loss_mean_key] = metrics.Mean(name=keys.LOSS_MEAN)
+      if regularization_losses is not None:
+        eval_metrics[self._loss_regularization_key] = metrics.Mean(
+            name=keys.LOSS_REGULARIZATION)
+      # Accuracy metric.
+      eval_metrics[self._accuracy_key] = metrics.Accuracy(name=keys.ACCURACY)
+    return eval_metrics
+
+  def update_metrics(self,
+                     eval_metrics,
+                     features,
+                     logits,
+                     labels,
+                     regularization_losses=None):
+    """Updates eval metrics. See `base_head.Head` for details."""
+    preds = self.predictions(logits)
+    class_ids = preds[prediction_keys.PredictionKeys.CLASS_IDS]
+    logits = base_head.check_logits_final_dim(logits, self.logits_dimension)
+    label_ids = self._processed_labels(logits, labels)
+    unweighted_loss, weights = self._unweighted_loss_and_weights(
+        logits, label_ids, features)
+
+    # Update metrics.
+    eval_metrics[self._loss_mean_key].update_state(
+        values=unweighted_loss, sample_weight=weights)
+    eval_metrics[self._accuracy_key].update_state(
+        y_true=label_ids, y_pred=class_ids, sample_weight=weights)
+
+    if regularization_losses is not None:
+      regularization_loss = tf.math.add_n(regularization_losses)
+      eval_metrics[self._loss_regularization_key].update_state(
+          values=regularization_loss)
+    return eval_metrics
+
+  def _create_tpu_estimator_spec(self,
+                                 features,
+                                 mode,
+                                 logits,
+                                 labels=None,
+                                 optimizer=None,
+                                 trainable_variables=None,
+                                 train_op_fn=None,
+                                 update_ops=None,
+                                 regularization_losses=None):
+    """Returns a `model_fn._TPUEstimatorSpec`.
+
+    Args:
+      features: Input `dict` of `Tensor` or `SparseTensor` objects.
+      mode: Estimator's `ModeKeys`.
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, logits_dimension]`.
+        For many applications, the shape is `[batch_size, logits_dimension]`.
+      labels: Labels integer or string `Tensor` with shape matching `logits`,
+        namely `[D0, D1, ... DN, 1]` or `[D0, D1, ... DN]`. `labels` is a
+        required argument when `mode` equals `TRAIN` or `EVAL`.
+      optimizer: A `tf.keras.optimizers.Optimizer` instance to optimize the
+        loss in TRAIN mode. Namely, sets `train_op = optimizer.get_updates(loss,
+        trainable_variables)`, which updates variables to minimize `loss`.
+      trainable_variables: A list or tuple of `Variable` objects to update to
+        minimize `loss`. In Tensorflow 1.x, by default these are the list of
+        variables collected in the graph under the key
+        `GraphKeys.TRAINABLE_VARIABLES`. As Tensorflow 2.x doesn't have
+        collections and GraphKeys, trainable_variables need to be passed
+        explicitly here.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns
+        `train_op`. Used if `optimizer` is `None`.
+      update_ops: A list or tuple of update ops to be run at training time. For
+        example, layers such as BatchNormalization create mean and variance
+        update ops that need to be run at training time. In Tensorflow 1.x,
+        these are thrown into an UPDATE_OPS collection. As Tensorflow 2.x
+        doesn't have collections, update_ops need to be passed explicitly here.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses. These losses are
+        usually expressed as a batch average, so for best results users need to
+        use the default `loss_reduction=SUM_OVER_BATCH_SIZE` when creating the
+        head to avoid scaling errors.
+
+    Returns:
+      A `model_fn._TPUEstimatorSpec` instance.
+
+    Raises:
+      ValueError: If both `train_op_fn` and `optimizer` are `None` in TRAIN
+        mode, or if both are set.
+    """
+    with ops.name_scope(self._name, 'head'):
+      # Predict.
+      pred_keys = prediction_keys.PredictionKeys
+      predictions = self.predictions(logits)
+      if mode == ModeKeys.PREDICT:
+        probabilities = predictions[pred_keys.PROBABILITIES]
+        classifier_output = base_head.classification_output(
+            scores=probabilities,
+            n_classes=self._n_classes,
+            label_vocabulary=self._label_vocabulary)
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.PREDICT,
+            predictions=predictions,
+            export_outputs={
+                base_head.DEFAULT_SERVING_KEY:
+                    classifier_output,
+                base_head.CLASSIFY_SERVING_KEY:
+                    classifier_output,
+                base_head.PREDICT_SERVING_KEY:
+                    export_output.PredictOutput(predictions)
+            })
+      regularized_training_loss = self.loss(
+          logits=logits,
+          labels=labels,
+          features=features,
+          mode=mode,
+          regularization_losses=regularization_losses)
+      # Eval.
+      if mode == ModeKeys.EVAL:
+        eval_metrics = self.metrics(regularization_losses=regularization_losses)
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.EVAL,
+            predictions=predictions,
+            loss=regularized_training_loss,
+            eval_metrics=base_head.create_eval_metrics_tuple(
+                self.update_metrics, {
+                    'eval_metrics': eval_metrics,
+                    'features': features,
+                    'logits': logits,
+                    'labels': labels,
+                    'regularization_losses': regularization_losses
+                }))
+      # Train.
+      train_op = base_head.create_estimator_spec_train_op(
+          head_name=self._name,
+          optimizer=optimizer,
+          train_op_fn=train_op_fn,
+          update_ops=update_ops,
+          trainable_variables=trainable_variables,
+          regularized_training_loss=regularized_training_loss,
+          loss_reduction=self._loss_reduction)
+    # Create summary.
+    base_head.create_estimator_spec_summary(
+        regularized_training_loss=regularized_training_loss,
+        regularization_losses=regularization_losses,
+        summary_key_fn=self._summary_key)
+    return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+        mode=ModeKeys.TRAIN,
+        predictions=predictions,
+        loss=regularized_training_loss,
+        train_op=train_op)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_head.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_head.py
new file mode 100644
index 00000000..8f989c65
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_head.py
@@ -0,0 +1,548 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Multi head class."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import six
+import tensorflow as tf
+from tensorflow.python.framework import ops
+from tensorflow.python.keras import metrics
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import model_fn
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.export import export_output
+from tensorflow_estimator.python.estimator.head import base_head
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+
+def _no_op_train_fn(loss):
+  del loss
+  return tf.no_op()
+
+
+def _default_export_output(export_outputs, head_name):
+  """Extracts the default export output from the given export_outputs dict."""
+  if len(export_outputs) == 1:
+    return next(six.itervalues(export_outputs))
+  try:
+    return export_outputs[base_head.DEFAULT_SERVING_KEY]
+  except KeyError:
+    raise ValueError(
+        '{} did not specify default export_outputs. '
+        'Given: {} '
+        'Suggested fix: Use one of the heads in tf.estimator, or include '
+        'key {} in export_outputs.'.format(head_name, export_outputs,
+                                           base_head.DEFAULT_SERVING_KEY))
+
+
+@estimator_export('estimator.MultiHead')
+class MultiHead(base_head.Head):
+  """Creates a `Head` for multi-objective learning.
+
+  This class merges the output of multiple `Head` objects. Specifically:
+
+  * For training, sums losses of each head, calls `train_op_fn` with this
+    final loss.
+  * For eval, merges metrics by adding `head.name` suffix to the keys in eval
+    metrics, such as `precision/head1.name`, `precision/head2.name`.
+  * For prediction, merges predictions and updates keys in prediction dict to a
+    2-tuple, `(head.name, prediction_key)`. Merges `export_outputs` such that
+    by default the first head is served.
+
+  Usage:
+
+  >>> head1 = tf.estimator.MultiLabelHead(n_classes=2, name='head1')
+  >>> head2 = tf.estimator.MultiLabelHead(n_classes=3, name='head2')
+  >>> multi_head = tf.estimator.MultiHead([head1, head2])
+  >>> logits = {
+  ...    'head1': np.array([[-10., 10.], [-15., 10.]], dtype=np.float32),
+  ...    'head2': np.array([[20., -20., 20.], [-30., 20., -20.]],
+  ...    dtype=np.float32),}
+  >>> labels = {
+  ...    'head1': np.array([[1, 0], [1, 1]], dtype=np.int64),
+  ...    'head2': np.array([[0, 1, 0], [1, 1, 0]], dtype=np.int64),}
+  >>> features = {'x': np.array(((42,),), dtype=np.float32)}
+  >>> # For large logits, sigmoid cross entropy loss is approximated as:
+  >>> # loss = labels * (logits < 0) * (-logits) +
+  >>> #        (1 - labels) * (logits > 0) * logits =>
+  >>> # head1: expected_unweighted_loss = [[10., 10.], [15., 0.]]
+  >>> # loss1 = ((10 + 10) / 2 + (15 + 0) / 2) / 2 = 8.75
+  >>> # head2: expected_unweighted_loss = [[20., 20., 20.], [30., 0., 0]]
+  >>> # loss2 = ((20 + 20 + 20) / 3 + (30 + 0 + 0) / 3) / 2 = 15.00
+  >>> # loss = loss1 + loss2 = 8.75 + 15.00 = 23.75
+  >>> loss = multi_head.loss(labels, logits, features=features)
+  >>> print('{:.2f}'.format(loss.numpy()))
+  23.75
+  >>> eval_metrics = multi_head.metrics()
+  >>> updated_metrics = multi_head.update_metrics(
+  ...   eval_metrics, features, logits, labels)
+  >>> for k in sorted(updated_metrics):
+  ...  print('{} : {:.2f}'.format(k, updated_metrics[k].result().numpy()))
+  auc/head1 : 0.17
+  auc/head2 : 0.33
+  auc_precision_recall/head1 : 0.60
+  auc_precision_recall/head2 : 0.40
+  average_loss/head1 : 8.75
+  average_loss/head2 : 15.00
+  loss/head1 : 8.75
+  loss/head2 : 15.00
+  >>> preds = multi_head.predictions(logits)
+  >>> print(preds[('head1', 'logits')])
+  tf.Tensor(
+    [[-10.  10.]
+     [-15.  10.]], shape=(2, 2), dtype=float32)
+
+  Usage with a canned estimator:
+
+  ```python
+  # In `input_fn`, specify labels as a dict keyed by head name:
+  def input_fn():
+    features = ...
+    labels1 = ...
+    labels2 = ...
+    return features, {'head1.name': labels1, 'head2.name': labels2}
+
+  # In `model_fn`, specify logits as a dict keyed by head name:
+  def model_fn(features, labels, mode):
+    # Create simple heads and specify head name.
+    head1 = tf.estimator.MultiClassHead(n_classes=3, name='head1')
+    head2 = tf.estimator.BinaryClassHead(name='head2')
+    # Create MultiHead from two simple heads.
+    head = tf.estimator.MultiHead([head1, head2])
+    # Create logits for each head, and combine them into a dict.
+    logits1, logits2 = logit_fn()
+    logits = {'head1.name': logits1, 'head2.name': logits2}
+    # Return the merged EstimatorSpec
+    return head.create_estimator_spec(..., logits=logits, ...)
+
+  # Create an estimator with this model_fn.
+  estimator = tf.estimator.Estimator(model_fn=model_fn)
+  estimator.train(input_fn=input_fn)
+  ```
+
+  Also supports `logits` as a `Tensor` of shape
+  `[D0, D1, ... DN, logits_dimension]`. It will split the `Tensor` along the
+  last dimension and distribute it appropriately among the heads. E.g.:
+
+  ```python
+  # Input logits.
+  logits = np.array([[-1., 1., 2., -2., 2.], [-1.5, 1., -3., 2., -2.]],
+                    dtype=np.float32)
+  # Suppose head1 and head2 have the following logits dimension.
+  head1.logits_dimension = 2
+  head2.logits_dimension = 3
+  # After splitting, the result will be:
+  logits_dict = {'head1_name': [[-1., 1.], [-1.5, 1.]],
+                 'head2_name':  [[2., -2., 2.], [-3., 2., -2.]]}
+  ```
+
+  Usage:
+
+  ```python
+  def model_fn(features, labels, mode):
+    # Create simple heads and specify head name.
+    head1 = tf.estimator.MultiClassHead(n_classes=3, name='head1')
+    head2 = tf.estimator.BinaryClassHead(name='head2')
+    # Create multi-head from two simple heads.
+    head = tf.estimator.MultiHead([head1, head2])
+    # Create logits for the multihead. The result of logits is a `Tensor`.
+    logits = logit_fn(logits_dimension=head.logits_dimension)
+    # Return the merged EstimatorSpec
+    return head.create_estimator_spec(..., logits=logits, ...)
+  ```
+
+  Args:
+    heads: List or tuple of `Head` instances. All heads must have `name`
+      specified. The first head in the list is the default used at serving time.
+    head_weights: Optional list of weights, same length as `heads`. Used when
+      merging losses to calculate the weighted sum of losses from each head. If
+      `None`, all losses are weighted equally.
+  """
+
+  def __init__(self, heads, head_weights=None):
+    if not heads:
+      raise ValueError('Must specify heads. Given: {}'.format(heads))
+    if head_weights:
+      if len(head_weights) != len(heads):
+        raise ValueError(
+            'heads and head_weights must have the same size. '
+            'Given len(heads): {}. Given len(head_weights): {}.'.format(
+                len(heads), len(head_weights)))
+    self._logits_dimension = 0
+    for head in heads:
+      if head.name is None:
+        raise ValueError(
+            'All given heads must have name specified. Given: {}'.format(head))
+      self._logits_dimension += head.logits_dimension
+    self._heads = tuple(heads)
+    self._head_weights = tuple(head_weights) if head_weights else tuple()
+    # Metric keys.
+    keys = metric_keys.MetricKeys
+    self._loss_regularization_key = self._summary_key(keys.LOSS_REGULARIZATION)
+    loss_keys = []
+    for head in self._heads:
+      loss_keys.append('{}/{}'.format(keys.LOSS, head.name))
+    self._loss_keys = tuple(loss_keys)
+
+  @property
+  def name(self):
+    """See `base_head.Head` for details."""
+    return '_'.join([h.name for h in self._heads])
+
+  @property
+  def logits_dimension(self):
+    """See `base_head.Head` for details."""
+    return self._logits_dimension
+
+  @property
+  def loss_reduction(self):
+    """See `base_head.Head` for details."""
+    loss_reductions = [head.loss_reduction for head in self._heads]
+    if len(set(loss_reductions)) > 1:
+      raise ValueError(
+          'The loss_reduction must be the same for different heads. '
+          'Given: {}'.format(loss_reductions))
+    return loss_reductions[0]
+
+  def _split_logits(self, logits):
+    """Splits logits along the last dimension and returns a dict.
+
+    If the input logits is not a dict, splitting is applied based on the logits
+    dimension of each head.
+    For example:
+
+    ```python
+    # head1.logits_dimension = 2
+    # head2.logits_dimension = 3
+    head1 = tf.estimator.MultiLabelHead(n_classes=2, name='head1_name')
+    head2 = tf.estimator.MultiClassHead(n_classes=3, name='head2_name')
+    multi_head = tf.estimator.MultiHead([head1, head2])
+    # Input logits
+    logits = np.array([[-1., 1., 2., -2., 2.], [-1.5, 1., -3., 2., -2.]],
+                      dtype=np.float32)
+    # As logits is not a dict, _split_logits is applied and returns the
+    # logits_dict as
+    logits_dict = {'head1_name': [[-1., 1.], [-1.5, 1.]],
+                   'head2_name':  [[2., -2., 2.], [-3., 2., -2.]]}
+    ```
+    Args:
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, logits_dimension]`.
+        For many applications, the shape is `[batch_size, logits_dimension]`.
+
+    Returns:
+      logits_dict: A dict of logits for each head.
+    """
+    logits_dict = {}
+    with ops.name_scope('split_logits', values=[logits]):
+      logits = ops.convert_to_tensor(logits)
+      logits_dimensions = [head.logits_dimension for head in self._heads]
+      total_logits_dimension = sum(logits_dimensions)
+      logits_tensor_shape = logits.shape.as_list()
+      last_dimension_size = logits_tensor_shape[-1]
+      if last_dimension_size is not None:
+        if last_dimension_size != total_logits_dimension:
+          raise ValueError(
+              'Could not split logits of shape %r among the heads with '
+              'individual logits dimensions: %r. The last dimension of the '
+              'logits tensor should equal %d but is %d.' %
+              (logits_tensor_shape, logits_dimensions, total_logits_dimension,
+               last_dimension_size))
+
+      # TODO(b/119617064): unify eager and graph implementations
+      if tf.executing_eagerly():
+        logits_shape = logits._shape_tuple()  # pylint: disable=protected-access
+        batch_shape = logits_shape[:-1]
+      else:
+        batch_shape = tf.compat.v1.shape(logits)[:-1]
+      zeros_like_batch_shape = tf.compat.v1.zeros_like(batch_shape)
+      minus_ones_like_batch_shape = -1 * tf.compat.v1.ones_like(batch_shape)
+      begin_idx = 0
+      for head in self._heads:
+        begin_tensor = tf.concat([zeros_like_batch_shape, [begin_idx]], axis=0)
+        size_tensor = tf.concat(
+            [minus_ones_like_batch_shape, [head.logits_dimension]], axis=0)
+        logits_dict[head.name] = tf.slice(
+            logits, begin=begin_tensor, size=size_tensor)
+        begin_idx += head.logits_dimension
+    return logits_dict
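+
+  # For a rank-2 logits tensor, the slicing above reduces to the following
+  # sketch (hypothetical head dimensions 2 and 3, eager execution assumed):
+  #
+  #   logits = tf.constant([[-1., 1., 2., -2., 2.]])
+  #   head1_logits = tf.slice(logits, begin=[0, 0], size=[-1, 2])
+  #   head2_logits = tf.slice(logits, begin=[0, 2], size=[-1, 3])
+  #   # head1_logits == [[-1., 1.]]; head2_logits == [[2., -2., 2.]]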
+
+  def _check_logits_and_labels(self, logits, labels=None):
+    """Validates the keys of logits and labels."""
+    head_names = []
+    for head in self._heads:
+      head_names.append(head.name)
+    # Checks logits keys and splits it if it's not a dict
+    if isinstance(logits, dict):
+      logits_missing_names = list(set(head_names) - set(list(logits)))
+      if logits_missing_names:
+        raise ValueError('logits has missing values for head(s): {}'.format(
+            logits_missing_names))
+      logits_dict = logits
+    else:
+      logits_dict = self._split_logits(logits)
+    # Checks labels type and its keys
+    if labels is not None:
+      if not isinstance(labels, dict):
+        raise ValueError('labels must be a dict. Given: {}'.format(labels))
+      labels_missing_names = list(set(head_names) - set(list(labels)))
+      if labels_missing_names:
+        raise ValueError('labels has missing values for head(s): {}'.format(
+            labels_missing_names))
+    return logits_dict
+
+  def loss(self,
+           labels,
+           logits,
+           features=None,
+           mode=None,
+           regularization_losses=None):
+    """Returns regularized training loss. See `base_head.Head` for details."""
+    logits_dict = self._check_logits_and_labels(logits, labels)
+    training_losses = []
+    for head in self._heads:
+      training_loss = head.loss(
+          logits=logits_dict[head.name],
+          labels=labels[head.name],
+          features=features,
+          mode=mode)
+      training_losses.append(training_loss)
+
+    training_losses = tuple(training_losses)
+    with ops.name_scope(
+        'merge_losses',
+        values=training_losses + (self._head_weights or tuple())):
+      if self._head_weights:
+        head_weighted_training_losses = []
+        for training_loss, head_weight in zip(training_losses,
+                                              self._head_weights):
+          head_weighted_training_losses.append(
+              tf.math.multiply(training_loss, head_weight))
+        training_losses = head_weighted_training_losses
+      merged_training_loss = tf.math.add_n(training_losses)
+      regularization_loss = tf.math.add_n(
+          regularization_losses) if regularization_losses is not None else None
+      regularized_training_loss = (
+          merged_training_loss + regularization_loss
+          if regularization_loss is not None else merged_training_loss)
+    return regularized_training_loss
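+
+  # With `head_weights`, the merge above is a weighted sum of per-head losses.
+  # Reusing the hypothetical losses from the class docstring:
+  #
+  #   loss1, loss2 = 8.75, 15.00        # per-head training losses
+  #   head_weights = (1., 2.)           # hypothetical head weights
+  #   merged = 1. * loss1 + 2. * loss2  # 38.75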
+
+  def predictions(self, logits, keys=None):
+    """Create predictions. See `base_head.Head` for details."""
+    logits_dict = self._check_logits_and_labels(logits)
+    predictions = {}
+    with ops.name_scope('merge_pred'):
+      for head in self._heads:
+        head_preds = head.predictions(logits=logits_dict[head.name])
+        for k, v in six.iteritems(head_preds):
+          predictions[(head.name, k)] = v
+    return predictions
+
+  def metrics(self, regularization_losses=None):
+    """Creates metrics. See `base_head.Head` for details."""
+    eval_metrics = {}
+    keys = metric_keys.MetricKeys
+    # Add regularization loss metric for multi_head.
+    if regularization_losses is not None:
+      eval_metrics[self._loss_regularization_key] = metrics.Mean(
+          name=keys.LOSS_REGULARIZATION)
+    with ops.name_scope('merge_eval'):
+      # Loss metric is not added by default in each head.
+      for loss_key in self._loss_keys:
+        eval_metrics[loss_key] = metrics.Mean(name=loss_key)
+    return eval_metrics
+
+  def update_metrics(self,
+                     eval_metrics,
+                     features,
+                     logits,
+                     labels,
+                     regularization_losses=None):
+    """Updates eval metrics. See `base_head.Head` for details."""
+    logits_dict = self._check_logits_and_labels(logits, labels)
+    # Update regularization loss metric
+    if regularization_losses is not None:
+      regularization_loss = tf.math.add_n(regularization_losses)
+      eval_metrics[self._loss_regularization_key].update_state(
+          values=regularization_loss)
+    # Update metrics for each head
+    for i, head in enumerate(self._heads):
+      head_logits = logits_dict[head.name]
+      head_labels = labels[head.name]
+      # Update loss metrics
+      training_loss = head.loss(
+          logits=head_logits, labels=head_labels, features=features)
+      eval_metrics[self._loss_keys[i]].update_state(values=training_loss)
+      # Update existing metrics in each head
+      head_metrics = head.metrics()
+      updated_metrics = head.update_metrics(head_metrics, features, head_logits,
+                                            head_labels)
+      eval_metrics.update(updated_metrics or {})
+    return eval_metrics
+
+  def create_estimator_spec(self,
+                            features,
+                            mode,
+                            logits,
+                            labels=None,
+                            optimizer=None,
+                            trainable_variables=None,
+                            train_op_fn=None,
+                            update_ops=None,
+                            regularization_losses=None):
+    """Returns a `model_fn.EstimatorSpec`.
+
+    Args:
+      features: Input `dict` of `Tensor` or `SparseTensor` objects.
+      mode: Estimator's `ModeKeys`.
+      logits: Input `dict` keyed by head name, or logits `Tensor` with shape
+        `[D0, D1, ... DN, logits_dimension]`. For many applications, the
+        `Tensor` shape is `[batch_size, logits_dimension]`. If logits is a
+        `Tensor`, the head will split it along the last dimension and
+        distribute it appropriately among the heads. Check `MultiHead` for
+        examples.
+      labels: Input `dict` keyed by head name. For each head, the label value
+        can be integer or string `Tensor` with shape matching its corresponding
+        `logits`. `labels` is a required argument when `mode` equals `TRAIN` or
+        `EVAL`.
+      optimizer: A `tf.keras.optimizers.Optimizer` instance to optimize the
+        loss in TRAIN mode. Namely, sets `train_op = optimizer.get_updates(loss,
+        trainable_variables)`, which updates variables to minimize `loss`.
+      trainable_variables: A list or tuple of `Variable` objects to update to
+        minimize `loss`. In Tensorflow 1.x, by default these are the list of
+        variables collected in the graph under the key
+        `GraphKeys.TRAINABLE_VARIABLES`. As Tensorflow 2.x doesn't have
+        collections and GraphKeys, trainable_variables need to be passed
+        explicitly here.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns
+        `train_op`. Used if `optimizer` is `None`.
+      update_ops: A list or tuple of update ops to be run at training time. For
+        example, layers such as BatchNormalization create mean and variance
+        update ops that need to be run at training time. In Tensorflow 1.x,
+        these are thrown into an UPDATE_OPS collection. As Tensorflow 2.x
+        doesn't have collections, update_ops need to be passed explicitly here.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses. These losses are
+        usually expressed as a batch average, so for best results, in each head,
+        users need to use the default `loss_reduction=SUM_OVER_BATCH_SIZE` to
+        avoid scaling errors. Unlike the per-head regularization losses, these
+        losses regularize the merged loss of all heads and are added to the
+        overall training loss of the multi head.
+
+    Returns:
+      A `model_fn.EstimatorSpec` instance.
+
+    Raises:
+      ValueError: If both `train_op_fn` and `optimizer` are `None` in TRAIN
+        mode, or if both are set, or if `mode` is not in Estimator's
+        `ModeKeys`.
+    """
+    with ops.name_scope(self.name, 'multi_head'):
+      logits_dict = self._check_logits_and_labels(logits, labels)
+      # Get all estimator spec.
+      all_estimator_spec = []
+      for head in self._heads:
+        all_estimator_spec.append(
+            head.create_estimator_spec(
+                features=features,
+                mode=mode,
+                logits=logits_dict[head.name],
+                labels=labels[head.name] if labels else None,
+                train_op_fn=_no_op_train_fn))
+      # Predict.
+      predictions = self.predictions(logits)
+      if mode == ModeKeys.PREDICT:
+        export_outputs = self._merge_predict_export_outputs(all_estimator_spec)
+        return model_fn.EstimatorSpec(
+            mode=ModeKeys.PREDICT,
+            predictions=predictions,
+            export_outputs=export_outputs)
+      loss = self.loss(labels, logits, features, mode, regularization_losses)
+      # Eval.
+      if mode == ModeKeys.EVAL:
+        eval_metrics = self.metrics(regularization_losses=regularization_losses)
+        updated_metrics = self.update_metrics(
+            eval_metrics,
+            features,
+            logits,
+            labels,
+            regularization_losses=regularization_losses)
+        return model_fn.EstimatorSpec(
+            mode=ModeKeys.EVAL,
+            predictions=predictions,
+            loss=loss,
+            eval_metric_ops=updated_metrics)
+      # Train.
+      if mode == ModeKeys.TRAIN:
+        train_op = base_head.create_estimator_spec_train_op(
+            head_name=self.name,
+            optimizer=optimizer,
+            train_op_fn=train_op_fn,
+            update_ops=update_ops,
+            trainable_variables=trainable_variables,
+            regularized_training_loss=loss,
+            loss_reduction=self.loss_reduction)
+        # Create summary.
+        base_head.create_estimator_spec_summary(loss, regularization_losses)
+        # eval_metrics.
+        eval_metrics = {}
+        for spec in all_estimator_spec:
+          eval_metrics.update(spec.eval_metric_ops or {})
+        # predictions can be used to access the logits in `TRAIN` mode
+        return model_fn.EstimatorSpec(
+            mode=ModeKeys.TRAIN,
+            loss=loss,
+            train_op=train_op,
+            predictions=predictions,
+            eval_metric_ops=eval_metrics)
+      raise ValueError('mode={} unrecognized'.format(mode))
+
+  def _merge_predict_export_outputs(self, all_estimator_spec):
+    """Merges list of `EstimatorSpec` export_outputs for PREDICT.
+
+    For each individual head, its DEFAULT_SERVING_KEY and PREDICT_SERVING_KEY
+    are extracted and merged for `export_outputs` in PREDICT mode of
+    `EstimatorSpec`. By default, the first head is served.
+
+    Args:
+      all_estimator_spec: list of `EstimatorSpec` for the individual heads.
+
+    Returns:
+      A dict of merged export_outputs from all heads for PREDICT.
+    """
+    # The first head is used for serving by default.
+    export_outputs = {
+        base_head.DEFAULT_SERVING_KEY:
+            _default_export_output(all_estimator_spec[0].export_outputs,
+                                   self._heads[0].name),
+    }
+    merged_predict_outputs = {}
+    for head, spec in zip(self._heads, all_estimator_spec):
+      for k, v in six.iteritems(spec.export_outputs):
+        # Collect default serving key for export_outputs
+        key = (
+            head.name if k == base_head.DEFAULT_SERVING_KEY else '{}/{}'.format(
+                head.name, k))
+        export_outputs[key] = v
+        # Collect predict serving key for merged_predict_outputs
+        if (k == base_head.PREDICT_SERVING_KEY and
+            isinstance(v, export_output.PredictOutput)):
+          for kp, vp in six.iteritems(v.outputs):
+            merged_predict_outputs['{}/{}'.format(head.name, kp)] = vp
+    export_outputs[base_head.PREDICT_SERVING_KEY] = (
+        export_output.PredictOutput(merged_predict_outputs))
+    return export_outputs
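+
+  # For two hypothetical heads named 'head1' and 'head2', the merge above
+  # produces export_outputs keyed roughly as:
+  #
+  #   {'serving_default': <head1 default output>,
+  #    'head1': <head1 default output>,
+  #    'head1/predict': <head1 PredictOutput>,
+  #    'head2': <head2 default output>,
+  #    'head2/predict': <head2 PredictOutput>,
+  #    'predict': PredictOutput(<merged per-head prediction tensors>)}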
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_label_head.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_label_head.py
new file mode 100644
index 00000000..7ac52e16
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/multi_label_head.py
@@ -0,0 +1,591 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Multi label head."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import six
+import tensorflow as tf
+from tensorflow.python.framework import ops
+from tensorflow.python.keras import metrics
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.ops import lookup_ops
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import model_fn
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.canned import prediction_keys
+from tensorflow_estimator.python.estimator.export import export_output
+from tensorflow_estimator.python.estimator.head import base_head
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+
+@estimator_export('estimator.MultiLabelHead')
+class MultiLabelHead(base_head.Head):
+  """Creates a `Head` for multi-label classification.
+
+  Multi-label classification handles the case where each example may have zero
+  or more associated labels, from a discrete set. This is distinct from
+  `MultiClassHead`, which has exactly one label per example.
+
+  Uses `sigmoid_cross_entropy` loss averaged over classes and a weighted sum
+  over the batch. Namely, if the input logits have shape
+  `[batch_size, n_classes]`, the loss is the average over `n_classes` and the
+  weighted sum over `batch_size`.
+
+  The head expects `logits` with shape `[D0, D1, ... DN, n_classes]`. In many
+  applications, the shape is `[batch_size, n_classes]`.
+
+  Labels can be:
+
+  * A multi-hot tensor of shape `[D0, D1, ... DN, n_classes]`
+  * An integer `SparseTensor` of class indices. The `dense_shape` must be
+    `[D0, D1, ... DN, ?]` and the values within `[0, n_classes)`.
+  * If `label_vocabulary` is given, a string `SparseTensor`. The `dense_shape`
+    must be `[D0, D1, ... DN, ?]` and the values within `label_vocabulary` or a
+    multi-hot tensor of shape `[D0, D1, ... DN, n_classes]`.
+
+  If `weight_column` is specified, weights must be of shape
+  `[D0, D1, ... DN]`, or `[D0, D1, ... DN, 1]`.
+
+  Also supports custom `loss_fn`. `loss_fn` takes `(labels, logits)` or
+  `(labels, logits, features)` as arguments and returns unreduced loss with
+  shape `[D0, D1, ... DN, 1]`. `loss_fn` must support indicator `labels` with
+  shape `[D0, D1, ... DN, n_classes]`. Namely, the head applies
+  `label_vocabulary` to the input labels before passing them to `loss_fn`.
+
+  Usage:
+
+  >>> n_classes = 2
+  >>> head = tf.estimator.MultiLabelHead(n_classes)
+  >>> logits = np.array([[-1., 1.], [-1.5, 1.5]], dtype=np.float32)
+  >>> labels = np.array([[1, 0], [1, 1]], dtype=np.int64)
+  >>> features = {'x': np.array([[41], [42]], dtype=np.int32)}
+  >>> # expected_loss = sum(_sigmoid_cross_entropy(labels, logits)) / batch_size
+  >>> #               = (1.31326169 + 0.9514133) / 2 = 1.13
+  >>> loss = head.loss(labels, logits, features=features)
+  >>> print('{:.2f}'.format(loss.numpy()))
+  1.13
+  >>> eval_metrics = head.metrics()
+  >>> updated_metrics = head.update_metrics(
+  ...   eval_metrics, features, logits, labels)
+  >>> for k in sorted(updated_metrics):
+  ...  print('{} : {:.2f}'.format(k, updated_metrics[k].result().numpy()))
+  auc : 0.33
+  auc_precision_recall : 0.77
+  average_loss : 1.13
+  >>> preds = head.predictions(logits)
+  >>> print(preds['logits'])
+  tf.Tensor(
+    [[-1.   1. ]
+     [-1.5  1.5]], shape=(2, 2), dtype=float32)
+
+  Usage with a canned estimator:
+
+  ```python
+  my_head = tf.estimator.MultiLabelHead(n_classes=3)
+  my_estimator = tf.estimator.DNNEstimator(
+      head=my_head,
+      hidden_units=...,
+      feature_columns=...)
+  ```
+
+  It can also be used with a custom `model_fn`. Example:
+
+  ```python
+  def _my_model_fn(features, labels, mode):
+    my_head = tf.estimator.MultiLabelHead(n_classes=3)
+    logits = tf.keras.Model(...)(features)
+
+    return my_head.create_estimator_spec(
+        features=features,
+        mode=mode,
+        labels=labels,
+        optimizer=tf.keras.optimizers.Adagrad(lr=0.1),
+        logits=logits)
+
+  my_estimator = tf.estimator.Estimator(model_fn=_my_model_fn)
+  ```
+
+  Args:
+    n_classes: Number of classes, must be greater than 1 (for 1 class, use
+      `BinaryClassHead`).
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It is used to down weight or boost examples during training. It
+      will be multiplied by the loss of the example.  Per-class weighting is not
+      supported.
+    thresholds: Iterable of floats in the range `(0, 1)`. Accuracy, precision
+      and recall metrics are evaluated for each threshold value. The threshold
+      is applied to the predicted probabilities, i.e. above the threshold is
+      `true`, below is `false`.
+    label_vocabulary: A list of strings representing possible label values. If
+      it is not given, labels must already be encoded as integers within
+      `[0, n_classes)` or as a multi-hot Tensor. If given, labels must be a
+      `SparseTensor` of `string` type whose values are in `label_vocabulary`.
+      An error is raised if the vocabulary is not provided and labels are
+      strings.
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Decides how to
+      reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`, namely
+      weighted sum of losses divided by batch size.
+    loss_fn: Optional loss function.
+    classes_for_class_based_metrics: List of integer class IDs or string class
+      names for which per-class metrics are evaluated. If integers, all must be
+      in the range `[0, n_classes - 1]`. If strings, all must be in
+      `label_vocabulary`.
+    name: Name of the head. If provided, summary and metrics keys will be
+      suffixed by `"/" + name`. Also used as `name_scope` when creating ops.
+  """
+
+  def __init__(self,
+               n_classes,
+               weight_column=None,
+               thresholds=None,
+               label_vocabulary=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               loss_fn=None,
+               classes_for_class_based_metrics=None,
+               name=None):
+    if n_classes is None or n_classes < 2:
+      raise ValueError('n_classes must be > 1 for multi-label classification. '
+                       'Given: {}'.format(n_classes))
+    thresholds = tuple(thresholds) if thresholds else tuple()
+    for threshold in thresholds:
+      if (threshold <= 0.0) or (threshold >= 1.0):
+        raise ValueError(
+            'thresholds must be in (0, 1) range. Given: {}'.format(threshold))
+    if label_vocabulary is not None:
+      if not isinstance(label_vocabulary, (list, tuple)):
+        raise ValueError('label_vocabulary must be a list or tuple. '
+                         'Given type: {}'.format(type(label_vocabulary)))
+      if len(label_vocabulary) != n_classes:
+        raise ValueError('Length of label_vocabulary must be n_classes ({}). '
+                         'Given: {}'.format(n_classes, len(label_vocabulary)))
+
+    if loss_fn:
+      base_head.validate_loss_fn_args(loss_fn)
+    base_head.validate_loss_reduction(loss_reduction)
+    if classes_for_class_based_metrics:
+      classes_for_class_based_metrics = tuple(classes_for_class_based_metrics)
+      if isinstance(classes_for_class_based_metrics[0], six.string_types):
+        if not label_vocabulary:
+          raise ValueError('label_vocabulary must be provided when '
+                           'classes_for_class_based_metrics are strings.')
+        class_ids = []
+        for class_string in classes_for_class_based_metrics:
+          class_ids.append(label_vocabulary.index(class_string))
+        classes_for_class_based_metrics = tuple(class_ids)
+      else:
+        for class_id in classes_for_class_based_metrics:
+          if (class_id < 0) or (class_id >= n_classes):
+            raise ValueError(
+                'All classes_for_class_based_metrics must be in range [0, {}]. '
+                'Given: {}'.format(n_classes - 1, class_id))
+    else:
+      classes_for_class_based_metrics = tuple()
+    self._n_classes = n_classes
+    self._weight_column = weight_column
+    self._thresholds = thresholds
+    self._label_vocabulary = label_vocabulary
+    self._loss_reduction = loss_reduction
+    self._loss_fn = loss_fn
+    self._classes_for_class_based_metrics = classes_for_class_based_metrics
+    self._name = name
+    # Metric keys.
+    keys = metric_keys.MetricKeys
+    self._loss_mean_key = self._summary_key(keys.LOSS_MEAN)
+    self._auc_key = self._summary_key(keys.AUC)
+    self._auc_pr_key = self._summary_key(keys.AUC_PR)
+    self._loss_regularization_key = self._summary_key(keys.LOSS_REGULARIZATION)
+    accuracy_keys = []
+    precision_keys = []
+    recall_keys = []
+    for threshold in self._thresholds:
+      accuracy_keys.append(
+          self._summary_key(keys.ACCURACY_AT_THRESHOLD % threshold))
+      precision_keys.append(
+          self._summary_key(keys.PRECISION_AT_THRESHOLD % threshold))
+      recall_keys.append(
+          self._summary_key(keys.RECALL_AT_THRESHOLD % threshold))
+    self._accuracy_keys = tuple(accuracy_keys)
+    self._precision_keys = tuple(precision_keys)
+    self._recall_keys = tuple(recall_keys)
+    prob_keys = []
+    auc_keys = []
+    auc_pr_keys = []
+    for class_id in self._classes_for_class_based_metrics:
+      if self._label_vocabulary is None:
+        prob_key = keys.PROBABILITY_MEAN_AT_CLASS % class_id
+        auc_key = keys.AUC_AT_CLASS % class_id
+        auc_pr_key = keys.AUC_PR_AT_CLASS % class_id
+      else:
+        prob_key = (
+            keys.PROBABILITY_MEAN_AT_NAME % self._label_vocabulary[class_id])
+        auc_key = keys.AUC_AT_NAME % self._label_vocabulary[class_id]
+        auc_pr_key = keys.AUC_PR_AT_NAME % self._label_vocabulary[class_id]
+      prob_keys.append(self._summary_key(prob_key))
+      auc_keys.append(self._summary_key(auc_key))
+      auc_pr_keys.append(self._summary_key(auc_pr_key))
+    self._prob_keys = tuple(prob_keys)
+    self._auc_keys = tuple(auc_keys)
+    self._auc_pr_keys = tuple(auc_pr_keys)
+
+  @property
+  def name(self):
+    """See `base_head.Head` for details."""
+    return self._name
+
+  @property
+  def logits_dimension(self):
+    """See `base_head.Head` for details."""
+    return self._n_classes
+
+  @property
+  def loss_reduction(self):
+    """See `base_head.Head` for details."""
+    return self._loss_reduction
+
+  # An attribute for lookup table. Note that for Graph execution, the lookup
+  # table is created on demand to make sure the lookup table is in the same
+  # graph as its input tensors for `train` and `eval` of Estimator (as Estimator
+  # re-creates graphs for `train`, `eval` and `predict`).
+  _cached_class_id_table = None
+
+  @property
+  def _class_id_table(self):
+    """Creates a lookup table for class_id.
+
+    In eager execution, this lookup table will be lazily created on the first
+    call of `self._class_id_table`, and cached for later use; In graph
+    execution, it will be created on demand.
+
+    Returns:
+      A hash table for lookup.
+    """
+    if self._cached_class_id_table is None or not tf.executing_eagerly():
+      self._cached_class_id_table = lookup_ops.index_table_from_tensor(
+          vocabulary_list=tuple(self._label_vocabulary), name='class_id_lookup')
+    return self._cached_class_id_table
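+
+  # Editor's illustrative sketch (not part of the original module): with
+  # label_vocabulary=['a', 'b', 'c'], the table created above maps label
+  # strings to their vocabulary indices, e.g. in eager mode:
+  #
+  #   table = lookup_ops.index_table_from_tensor(
+  #       vocabulary_list=('a', 'b', 'c'), name='class_id_lookup')
+  #   table.lookup(tf.constant(['b', 'c'])).numpy()  # -> array([1, 2])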
+
+  def _processed_labels(self, logits, labels):
+    """Converts labels to integer id space."""
+    if labels is None:
+      raise ValueError(base_head._LABEL_NONE_ERR_MSG)  # pylint:disable=protected-access
+    if isinstance(labels, tf.sparse.SparseTensor):
+      label_values = labels.values
+      if labels.dtype == tf.dtypes.string:
+        label_ids_values = self._class_id_table.lookup(label_values)
+        label_ids = tf.sparse.SparseTensor(
+            indices=labels.indices,
+            values=label_ids_values,
+            dense_shape=labels.dense_shape)
+        processed_labels = tf.sparse.to_indicator(label_ids, self._n_classes)
+      else:
+        if not label_values.dtype.is_integer:
+          raise ValueError(
+              'Labels dtype should be integer. Instead got {}.'.format(
+                  label_values.dtype))
+        err_msg = (r'labels must be an integer SparseTensor with values in '
+                   r'[0, {})'.format(self._n_classes))
+        label_values = base_head.check_label_range(
+            labels.values, self._n_classes, message=err_msg)
+        if tf.executing_eagerly():
+          processed_labels = tf.sparse.to_indicator(labels, self._n_classes)
+        else:
+          with tf.control_dependencies([label_values]):
+            processed_labels = tf.sparse.to_indicator(labels, self._n_classes)
+      processed_labels = tf.cast(processed_labels, dtype=tf.dtypes.int64)
+    else:
+      err_msg = (
+          r'labels must be an integer indicator Tensor with values in [0, 1]')
+      processed_labels = base_head.check_label_range(labels, 2, message=err_msg)
+
+    return base_head.check_dense_labels_match_logits_and_reshape(
+        labels=processed_labels,
+        logits=logits,
+        expected_labels_dimension=self.logits_dimension)
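+
+  # Editor's illustrative sketch (not part of the original module): how a
+  # sparse integer label tensor becomes a multi-hot indicator, assuming
+  # n_classes=3 and a batch of two examples:
+  #
+  #   labels = tf.sparse.SparseTensor(
+  #       indices=[[0, 0], [1, 0], [1, 1]], values=[0, 1, 2],
+  #       dense_shape=[2, 2])
+  #   tf.sparse.to_indicator(labels, 3)  # -> [[True, False, False],
+  #                                      #     [False, True, True]]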
+
+  def _unweighted_loss_and_weights(self, logits, processed_labels, features):
+    """Computes loss spec."""
+    if self._loss_fn:
+      unweighted_loss = base_head.call_loss_fn(
+          loss_fn=self._loss_fn,
+          labels=processed_labels,
+          logits=logits,
+          features=features,
+          expected_loss_dim=1)
+    else:
+      unweighted_loss = tf.compat.v1.losses.sigmoid_cross_entropy(
+          multi_class_labels=processed_labels,
+          logits=logits,
+          reduction=tf.compat.v1.losses.Reduction.NONE)
+      # Averages loss over classes.
+      unweighted_loss = tf.math.reduce_mean(
+          unweighted_loss, axis=-1, keepdims=True)
+    weights = base_head.get_weights_and_check_match_logits(
+        features=features, weight_column=self._weight_column, logits=logits)
+    return unweighted_loss, weights
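+
+  # Editor's illustrative sketch (not part of the original module): the
+  # default branch above is equivalent to elementwise sigmoid cross-entropy
+  # averaged over classes, e.g. for the values in the class docstring:
+  #
+  #   per_class = tf.nn.sigmoid_cross_entropy_with_logits(
+  #       labels=tf.constant([[1., 0.], [1., 1.]]),
+  #       logits=tf.constant([[-1., 1.], [-1.5, 1.5]]))
+  #   per_example = tf.math.reduce_mean(per_class, axis=-1, keepdims=True)
+  #   # per_example -> [[1.31326169], [0.9514133]]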
+
+  def loss(self,
+           labels,
+           logits,
+           features=None,
+           mode=None,
+           regularization_losses=None):
+    """Returns regularized training loss. See `base_head.Head` for details."""
+    del mode  # Unused for this head.
+    with ops.name_scope(
+        'losses', values=(logits, labels, regularization_losses, features)):
+      logits = base_head.check_logits_final_dim(logits, self.logits_dimension)
+      processed_labels = self._processed_labels(logits, labels)
+      unweighted_loss, weights = self._unweighted_loss_and_weights(
+          logits, processed_labels, features)
+      training_loss = losses_utils.compute_weighted_loss(
+          unweighted_loss,
+          sample_weight=weights,
+          reduction=self._loss_reduction)
+      regularization_loss = tf.math.add_n(
+          regularization_losses) if regularization_losses is not None else None
+      regularized_training_loss = (
+          training_loss + regularization_loss
+          if regularization_loss is not None else training_loss)
+    return regularized_training_loss
+
+  def predictions(self, logits, keys=None):
+    """Return predictions based on keys.
+
+    See `base_head.Head` for details.
+
+    Args:
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, logits_dimension]`.
+        For many applications, the shape is `[batch_size, logits_dimension]`.
+      keys: A list of prediction keys. Each key can be either the class
+        variable of `prediction_keys.PredictionKeys` or its string value, such
+        as `prediction_keys.PredictionKeys.LOGITS` or `'logits'`.
+
+    Returns:
+      A dict of predictions.
+    """
+    pred_keys = prediction_keys.PredictionKeys
+    valid_keys = [pred_keys.LOGITS, pred_keys.PROBABILITIES, pred_keys.CLASSES]
+    if keys:
+      base_head.check_prediction_keys(keys, valid_keys)
+    else:
+      keys = valid_keys
+    logits = base_head.check_logits_final_dim(logits, self.logits_dimension)
+    predictions = {}
+    with ops.name_scope('predictions', values=(logits,)):
+      if pred_keys.LOGITS in keys:
+        predictions[pred_keys.LOGITS] = logits
+      if pred_keys.PROBABILITIES in keys:
+        probabilities = tf.math.sigmoid(logits, name=pred_keys.PROBABILITIES)
+        predictions[pred_keys.PROBABILITIES] = probabilities
+      if pred_keys.CLASSES in keys:
+        predictions[pred_keys.CLASSES] = base_head.all_classes(
+            logits, self._n_classes, self._label_vocabulary)
+
+      return predictions
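+
+  # Editor's illustrative sketch (not part of the original module): requesting
+  # a subset of keys returns only those entries, e.g.:
+  #
+  #   head = MultiLabelHead(n_classes=2)
+  #   preds = head.predictions(tf.constant([[-1., 1.]]), keys=['probabilities'])
+  #   # preds -> {'probabilities': tf.math.sigmoid(logits)}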
+
+  def metrics(self, regularization_losses=None):
+    """Creates metrics. See `base_head.Head` for details."""
+    keys = metric_keys.MetricKeys
+    with ops.name_scope(None, 'metrics', (regularization_losses,)):
+      # Mean metric.
+      eval_metrics = {}
+      eval_metrics[self._loss_mean_key] = metrics.Mean(name=keys.LOSS_MEAN)
+      # The default summation_method is "interpolation" in the AUC metric.
+      eval_metrics[self._auc_key] = metrics.AUC(name=keys.AUC)
+      eval_metrics[self._auc_pr_key] = metrics.AUC(curve='PR', name=keys.AUC_PR)
+      if regularization_losses is not None:
+        eval_metrics[self._loss_regularization_key] = metrics.Mean(
+            name=keys.LOSS_REGULARIZATION)
+      for i, threshold in enumerate(self._thresholds):
+        eval_metrics[self._accuracy_keys[i]] = metrics.BinaryAccuracy(
+            name=self._accuracy_keys[i], threshold=threshold)
+        eval_metrics[self._precision_keys[i]] = (
+            metrics.Precision(
+                name=self._precision_keys[i], thresholds=threshold))
+        eval_metrics[self._recall_keys[i]] = metrics.Recall(
+            name=self._recall_keys[i], thresholds=threshold)
+      for i in range(len(self._classes_for_class_based_metrics)):
+        eval_metrics[self._prob_keys[i]] = metrics.Mean(name=self._prob_keys[i])
+        eval_metrics[self._auc_keys[i]] = metrics.AUC(name=self._auc_keys[i])
+        eval_metrics[self._auc_pr_keys[i]] = metrics.AUC(
+            curve='PR', name=self._auc_pr_keys[i])
+    return eval_metrics
+
+  def update_metrics(self,
+                     eval_metrics,
+                     features,
+                     logits,
+                     labels,
+                     regularization_losses=None):
+    """Updates eval metrics. See `base_head.Head` for details."""
+    logits = base_head.check_logits_final_dim(logits, self.logits_dimension)
+    processed_labels = self._processed_labels(logits, labels)
+    unweighted_loss, weights = self._unweighted_loss_and_weights(
+        logits, processed_labels, features)
+    prob_key = prediction_keys.PredictionKeys.PROBABILITIES
+    predictions = self.predictions(logits, [prob_key])
+    probabilities = predictions[prob_key]
+
+    # Update metrics.
+    eval_metrics[self._loss_mean_key].update_state(
+        values=unweighted_loss, sample_weight=weights)
+    eval_metrics[self._auc_key].update_state(
+        y_true=processed_labels, y_pred=probabilities, sample_weight=weights)
+    eval_metrics[self._auc_pr_key].update_state(
+        y_true=processed_labels, y_pred=probabilities, sample_weight=weights)
+    if regularization_losses is not None:
+      regularization_loss = tf.math.add_n(regularization_losses)
+      eval_metrics[self._loss_regularization_key].update_state(
+          values=regularization_loss)
+    for i in range(len(self._thresholds)):
+      eval_metrics[self._accuracy_keys[i]].update_state(
+          y_true=processed_labels, y_pred=probabilities, sample_weight=weights)
+      eval_metrics[self._precision_keys[i]].update_state(
+          y_true=processed_labels, y_pred=probabilities, sample_weight=weights)
+      eval_metrics[self._recall_keys[i]].update_state(
+          y_true=processed_labels, y_pred=probabilities, sample_weight=weights)
+    for i, class_id in enumerate(self._classes_for_class_based_metrics):
+      batch_rank = tf.rank(probabilities) - 1
+      begin = tf.concat(
+          [tf.zeros([batch_rank], dtype=tf.dtypes.int32), [class_id]], axis=0)
+      size = tf.concat([-1 * tf.ones([batch_rank], dtype=tf.dtypes.int32), [1]],
+                       axis=0)
+      class_probabilities = tf.slice(probabilities, begin=begin, size=size)
+      class_labels = tf.slice(processed_labels, begin=begin, size=size)
+      base_head.update_metric_with_broadcast_weights(
+          eval_metrics[self._prob_keys[i]], class_probabilities, weights)
+      eval_metrics[self._auc_keys[i]].update_state(
+          y_true=class_labels,
+          y_pred=class_probabilities,
+          sample_weight=weights)
+      eval_metrics[self._auc_pr_keys[i]].update_state(
+          y_true=class_labels,
+          y_pred=class_probabilities,
+          sample_weight=weights)
+    return eval_metrics
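+
+  # Editor's illustrative sketch (not part of the original module): the
+  # begin/size construction above slices out a single class column for any
+  # batch rank. For probabilities of shape [batch_size, n_classes] and
+  # class_id=1 it reduces to:
+  #
+  #   class_probabilities = tf.slice(
+  #       probabilities, begin=[0, 1], size=[-1, 1])  # shape [batch_size, 1]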
+
+  def _create_tpu_estimator_spec(self,
+                                 features,
+                                 mode,
+                                 logits,
+                                 labels=None,
+                                 optimizer=None,
+                                 trainable_variables=None,
+                                 train_op_fn=None,
+                                 update_ops=None,
+                                 regularization_losses=None):
+    """Returns an `model_fn._TPUEstimatorSpec`.
+
+    Args:
+      features: Input `dict` of `Tensor` or `SparseTensor` objects.
+      mode: Estimator's `ModeKeys`.
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, n_classes]`. For many
+        applications, the shape is `[batch_size, n_classes]`.
+      labels: Labels with shape matching `logits`. Can be multi-hot `Tensor`
+        with shape `[D0, D1, ... DN, n_classes]` or `SparseTensor` with
+        `dense_shape` `[D0, D1, ... DN, ?]`. `labels` is a required argument
+        when `mode` equals `TRAIN` or `EVAL`.
+      optimizer: A `tf.keras.optimizers.Optimizer` instance to optimize the
+        loss in TRAIN mode. Namely, sets `train_op = optimizer.get_updates(loss,
+        trainable_variables)`, which updates variables to minimize `loss`.
+      trainable_variables: A list or tuple of `Variable` objects to update to
+        minimize `loss`. In Tensorflow 1.x, by default these are the list of
+        variables collected in the graph under the key
+        `GraphKeys.TRAINABLE_VARIABLES`. As Tensorflow 2.x doesn't have
+        collections and GraphKeys, trainable_variables need to be passed
+        explicitly here.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns
+        `train_op`. Used if `optimizer` is `None`.
+      update_ops: A list or tuple of update ops to be run at training time. For
+        example, layers such as BatchNormalization create mean and variance
+        update ops that need to be run at training time. In Tensorflow 1.x,
+        these are thrown into an UPDATE_OPS collection. As Tensorflow 2.x
+        doesn't have collections, update_ops need to be passed explicitly here.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses. These losses are
+        usually expressed as a batch average, so for best results users need to
+        set `loss_reduction=SUM_OVER_BATCH_SIZE` when creating the head to avoid
+        scaling errors.
+
+    Returns:
+      `model_fn._TPUEstimatorSpec`.
+    Raises:
+      ValueError: If both `train_op_fn` and `optimizer` are `None` in TRAIN
+        mode, or if both are set.
+    """
+    with ops.name_scope(self._name, 'head'):
+      # Predict.
+      pred_keys = prediction_keys.PredictionKeys
+      predictions = self.predictions(logits)
+      if mode == ModeKeys.PREDICT:
+        probabilities = predictions[pred_keys.PROBABILITIES]
+        classifier_output = base_head.classification_output(
+            scores=probabilities,
+            n_classes=self._n_classes,
+            label_vocabulary=self._label_vocabulary)
+        return model_fn._TPUEstimatorSpec(  # pylint:disable=protected-access
+            mode=ModeKeys.PREDICT,
+            predictions=predictions,
+            export_outputs={
+                base_head.DEFAULT_SERVING_KEY: classifier_output,
+                base_head.CLASSIFY_SERVING_KEY: classifier_output,
+                base_head.PREDICT_SERVING_KEY: (
+                    export_output.PredictOutput(predictions))
+            })
+
+      regularized_training_loss = self.loss(
+          logits=logits,
+          labels=labels,
+          features=features,
+          mode=mode,
+          regularization_losses=regularization_losses)
+      # Eval.
+      if mode == ModeKeys.EVAL:
+        eval_metrics = self.metrics(regularization_losses=regularization_losses)
+        return model_fn._TPUEstimatorSpec(  # pylint:disable=protected-access
+            mode=ModeKeys.EVAL,
+            predictions=predictions,
+            loss=regularized_training_loss,
+            eval_metrics=base_head.create_eval_metrics_tuple(
+                self.update_metrics, {
+                    'eval_metrics': eval_metrics,
+                    'features': features,
+                    'logits': logits,
+                    'labels': labels,
+                    'regularization_losses': regularization_losses
+                }))
+      # Train.
+      train_op = base_head.create_estimator_spec_train_op(
+          head_name=self._name,
+          optimizer=optimizer,
+          train_op_fn=train_op_fn,
+          update_ops=update_ops,
+          trainable_variables=trainable_variables,
+          regularized_training_loss=regularized_training_loss,
+          loss_reduction=self._loss_reduction)
+    # Create summary.
+    base_head.create_estimator_spec_summary(
+        regularized_training_loss=regularized_training_loss,
+        regularization_losses=regularization_losses,
+        summary_key_fn=self._summary_key)
+    return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+        mode=ModeKeys.TRAIN,
+        predictions=predictions,
+        loss=regularized_training_loss,
+        train_op=train_op)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/regression_head.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/regression_head.py
new file mode 100644
index 00000000..d70cde6e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/regression_head.py
@@ -0,0 +1,583 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Regression head."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import tensorflow as tf
+
+from tensorflow.python.framework import ops
+from tensorflow.python.keras import metrics
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import model_fn
+from tensorflow_estimator.python.estimator.canned import metric_keys
+from tensorflow_estimator.python.estimator.canned import prediction_keys
+from tensorflow_estimator.python.estimator.export import export_output
+from tensorflow_estimator.python.estimator.head import base_head
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+
+@estimator_export('estimator.RegressionHead')
+class RegressionHead(base_head.Head):
+  """Creates a `Head` for regression using the `mean_squared_error` loss.
+
+  The loss is the weighted sum over all input dimensions. Namely, if the input
+  labels have shape `[batch_size, label_dimension]`, the loss is the weighted
+  sum over both `batch_size` and `label_dimension`.
+
+  The head expects `logits` with shape `[D0, D1, ... DN, label_dimension]`.
+  In many applications, the shape is `[batch_size, label_dimension]`.
+
+  The `labels` shape must match `logits`, namely
+  `[D0, D1, ... DN, label_dimension]`. If `label_dimension=1`, shape
+  `[D0, D1, ... DN]` is also supported.
+
+  If `weight_column` is specified, weights must be of shape
+  `[D0, D1, ... DN]`, `[D0, D1, ... DN, 1]` or
+  `[D0, D1, ... DN, label_dimension]`.
+
+  Supports custom `loss_fn`. `loss_fn` takes `(labels, logits)` or
+  `(labels, logits, features, loss_reduction)` as arguments and returns
+  unreduced loss with shape `[D0, D1, ... DN, label_dimension]`.
+
+  Also supports a custom `inverse_link_fn`, known as the 'mean function'.
+  `inverse_link_fn` is only used in `PREDICT` mode. It takes `logits` as its
+  argument and returns predicted values. This function is the inverse of the
+  link function defined in
+  https://en.wikipedia.org/wiki/Generalized_linear_model#Link_function
+  For example, for Poisson regression, set `inverse_link_fn=tf.exp`.
+
+  Usage:
+
+  >>> head = tf.estimator.RegressionHead()
+  >>> logits = np.array(((45,), (41,),), dtype=np.float32)
+  >>> labels = np.array(((43,), (44,),), dtype=np.int32)
+  >>> features = {'x': np.array(((42,),), dtype=np.float32)}
+  >>> # expected_loss = weighted_loss / batch_size
+  >>> #               = ((43-45)^2 + (44-41)^2) / 2 = 6.50
+  >>> loss = head.loss(labels, logits, features=features)
+  >>> print('{:.2f}'.format(loss.numpy()))
+  6.50
+  >>> eval_metrics = head.metrics()
+  >>> updated_metrics = head.update_metrics(
+  ...   eval_metrics, features, logits, labels)
+  >>> for k in sorted(updated_metrics):
+  ...  print('{} : {:.2f}'.format(k, updated_metrics[k].result().numpy()))
+  average_loss : 6.50
+  label/mean : 43.50
+  prediction/mean : 43.00
+  >>> preds = head.predictions(logits)
+  >>> print(preds['predictions'])
+  tf.Tensor(
+    [[45.]
+     [41.]], shape=(2, 1), dtype=float32)
+
+  Usage with a canned estimator:
+
+  ```python
+  my_head = tf.estimator.RegressionHead()
+  my_estimator = tf.estimator.DNNEstimator(
+      head=my_head,
+      hidden_units=...,
+      feature_columns=...)
+  ```
+
+  It can also be used with a custom `model_fn`. Example:
+
+  ```python
+  def _my_model_fn(features, labels, mode):
+    my_head = tf.estimator.RegressionHead()
+    logits = tf.keras.Model(...)(features)
+
+    return my_head.create_estimator_spec(
+        features=features,
+        mode=mode,
+        labels=labels,
+        optimizer=tf.keras.optimizers.Adagrad(lr=0.1),
+        logits=logits)
+
+  my_estimator = tf.estimator.Estimator(model_fn=_my_model_fn)
+  ```
+
+  Args:
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It is used to down weight or boost examples during training. It
+      will be multiplied by the loss of the example.
+    label_dimension: Number of regression labels per example. This is the size
+      of the last dimension of the labels `Tensor` (typically, this has shape
+      `[batch_size, label_dimension]`).
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Decides how to
+      reduce training loss over batch and label dimension. Defaults to
+      `SUM_OVER_BATCH_SIZE`, namely weighted sum of losses divided by
+      `batch_size * label_dimension`.
+    loss_fn: Optional loss function. Defaults to `mean_squared_error`.
+    inverse_link_fn: Optional inverse link function, also known as 'mean
+      function'. Defaults to identity.
+    name: Name of the head. If provided, summary and metrics keys will be
+      suffixed by `"/" + name`. Also used as `name_scope` when creating ops.
+  """
+
+  def __init__(self,
+               label_dimension=1,
+               weight_column=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               loss_fn=None,
+               inverse_link_fn=None,
+               name=None):
+    if label_dimension < 1:
+      raise ValueError('Invalid label_dimension {}.'.format(label_dimension))
+    base_head.validate_loss_reduction(loss_reduction)
+    if loss_fn:
+      base_head.validate_loss_fn_args(loss_fn)
+    self._logits_dimension = label_dimension
+    self._weight_column = weight_column
+    self._loss_reduction = loss_reduction
+    self._loss_fn = loss_fn
+    self._inverse_link_fn = inverse_link_fn
+    self._name = name
+    # Metric keys.
+    keys = metric_keys.MetricKeys
+    self._loss_mean_key = self._summary_key(keys.LOSS_MEAN)
+    self._prediction_mean_key = self._summary_key(keys.PREDICTION_MEAN)
+    self._label_mean_key = self._summary_key(keys.LABEL_MEAN)
+    self._loss_regularization_key = self._summary_key(keys.LOSS_REGULARIZATION)
+
+  @property
+  def name(self):
+    """See `base_head.Head` for details."""
+    return self._name
+
+  @property
+  def logits_dimension(self):
+    """See `base_head.Head` for details."""
+    return self._logits_dimension
+
+  @property
+  def loss_reduction(self):
+    """See `base_head.Head` for details."""
+    return self._loss_reduction
+
+  def _processed_labels(self, logits, labels):
+    labels = base_head.check_dense_labels_match_logits_and_reshape(
+        labels=labels,
+        logits=logits,
+        expected_labels_dimension=self._logits_dimension)
+    labels = tf.cast(labels, dtype=tf.dtypes.float32)
+    return labels
+
+  def _unweighted_loss_and_weights(self, logits, labels, features):
+    """Computes unweighted loss and weights."""
+    if self._loss_fn:
+      unweighted_loss = base_head.call_loss_fn(
+          loss_fn=self._loss_fn,
+          labels=labels,
+          logits=logits,
+          features=features,
+          expected_loss_dim=self._logits_dimension)
+    else:
+      unweighted_loss = tf.compat.v1.losses.mean_squared_error(
+          labels=labels,
+          predictions=logits,
+          reduction=tf.compat.v1.losses.Reduction.NONE)
+    weights = base_head.get_weights_and_check_match_logits(
+        features=features,
+        weight_column=self._weight_column,
+        logits=logits,
+        allow_per_logit_weights=True)
+    return unweighted_loss, weights
+
+  def loss(self,
+           labels,
+           logits,
+           features=None,
+           mode=None,
+           regularization_losses=None):
+    """Return predictions based on keys. See `base_head.Head` for details."""
+    del mode  # Unused for this head.
+    with ops.name_scope(
+        'losses', values=(logits, labels, regularization_losses, features)):
+      logits = base_head.check_logits_final_dim(logits, self._logits_dimension)
+      labels = self._processed_labels(logits, labels)
+      unweighted_loss, weights = self._unweighted_loss_and_weights(
+          logits, labels, features)
+      training_loss = losses_utils.compute_weighted_loss(
+          unweighted_loss,
+          sample_weight=weights,
+          reduction=self._loss_reduction)
+      regularization_loss = tf.math.add_n(
+          regularization_losses) if regularization_losses is not None else None
+      regularized_training_loss = (
+          training_loss + regularization_loss
+          if regularization_loss is not None else training_loss)
+    return regularized_training_loss
+
+  def predictions(self, logits):
+    """Return predictions based on keys.
+
+    See `base_head.Head` for details.
+
+    Args:
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, logits_dimension]`.
+        For many applications, the shape is `[batch_size, logits_dimension]`.
+
+    Returns:
+      A dict of predictions.
+    """
+    logits = base_head.check_logits_final_dim(logits, self._logits_dimension)
+    pred_keys = prediction_keys.PredictionKeys
+    with ops.name_scope('predictions', values=(logits,)):
+      if self._inverse_link_fn:
+        predicted_value = self._inverse_link_fn(logits)
+        predictions = {
+            pred_keys.PREDICTIONS: predicted_value,
+            pred_keys.LOGITS: logits,
+        }
+      else:
+        predicted_value = logits
+        predictions = {pred_keys.PREDICTIONS: predicted_value}
+    return predictions
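+
+  # Editor's illustrative sketch (not part of the original module): with a
+  # custom inverse link such as `inverse_link_fn=tf.math.exp` (Poisson
+  # regression), the returned dict contains both keys:
+  #
+  #   head = RegressionHead(inverse_link_fn=tf.math.exp)
+  #   preds = head.predictions(tf.constant([[0.], [1.]]))
+  #   # preds['predictions'] -> exp(logits); preds['logits'] -> logits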
+
+  def metrics(self, regularization_losses=None):
+    """Creates metrics. See `base_head.Head` for details."""
+    with ops.name_scope('metrics', values=(regularization_losses,)):
+      keys = metric_keys.MetricKeys
+      eval_metrics = {}
+      eval_metrics[self._loss_mean_key] = metrics.Mean(name=keys.LOSS_MEAN)
+      eval_metrics[self._prediction_mean_key] = metrics.Mean(
+          name=keys.PREDICTION_MEAN)
+      eval_metrics[self._label_mean_key] = metrics.Mean(name=keys.LABEL_MEAN)
+
+      if regularization_losses is not None:
+        eval_metrics[self._loss_regularization_key] = metrics.Mean(
+            name=keys.LOSS_REGULARIZATION)
+    return eval_metrics
+
+  def update_metrics(self,
+                     eval_metrics,
+                     features,
+                     logits,
+                     labels,
+                     regularization_losses=None):
+    """Updates eval metrics. See `base_head.Head` for details."""
+    # Compute predictions.
+    predictions = self.predictions(logits)
+    predicted_value = predictions[prediction_keys.PredictionKeys.PREDICTIONS]
+    logits = base_head.check_logits_final_dim(logits, self.logits_dimension)
+    label_ids = self._processed_labels(logits, labels)
+    unweighted_loss, weights = self._unweighted_loss_and_weights(
+        logits, label_ids, features)
+
+    # Update metrics.
+    eval_metrics[self._loss_mean_key].update_state(
+        values=unweighted_loss, sample_weight=weights)
+    eval_metrics[self._label_mean_key].update_state(
+        values=labels, sample_weight=weights)
+    base_head.update_metric_with_broadcast_weights(
+        eval_metrics[self._prediction_mean_key], predicted_value, weights)
+    if regularization_losses is not None:
+      regularization_loss = tf.math.add_n(regularization_losses)
+      eval_metrics[self._loss_regularization_key].update_state(
+          values=regularization_loss)
+    return eval_metrics
+
+  def _create_tpu_estimator_spec(self,
+                                 features,
+                                 mode,
+                                 logits,
+                                 labels=None,
+                                 optimizer=None,
+                                 trainable_variables=None,
+                                 train_op_fn=None,
+                                 update_ops=None,
+                                 regularization_losses=None):
+    """Returns an `EstimatorSpec`.
+
+    Args:
+      features: Input `dict` mapping string feature names to `Tensor` or
+        `SparseTensor` objects containing the values for that feature in a
+        minibatch. Often used to fetch the example-weight tensor.
+      mode: Estimator's `ModeKeys`.
+      logits: logits `Tensor` with shape `[D0, D1, ... DN, logits_dimension]`.
+        For many applications, the shape is `[batch_size, logits_dimension]`.
+      labels: Labels `Tensor` with shape matching `logits`, namely `[D0, D1, ...
+        DN, logits_dimension]`. When `logits_dimension=1`, shape `[D0, D1, ...
+        DN]` is also supported. `labels` is a required argument when `mode`
+        equals `TRAIN` or `EVAL`.
+      optimizer: A `tf.keras.optimizers.Optimizer` instance to optimize the
+        loss in TRAIN mode. Namely, sets `train_op = optimizer.get_updates(loss,
+        trainable_variables)`, which updates variables to minimize `loss`.
+      trainable_variables: A list or tuple of `Variable` objects to update to
+        minimize `loss`. In Tensorflow 1.x, by default these are the list of
+        variables collected in the graph under the key
+        `GraphKeys.TRAINABLE_VARIABLES`. As Tensorflow 2.x doesn't have
+        collections and GraphKeys, trainable_variables need to be passed
+        explicitly here.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns
+        `train_op`. Used if `optimizer` is `None`.
+      update_ops: A list or tuple of update ops to be run at training time. For
+        example, layers such as BatchNormalization create mean and variance
+        update ops that need to be run at training time. In Tensorflow 1.x,
+        these are thrown into an UPDATE_OPS collection. As Tensorflow 2.x
+        doesn't have collections, update_ops need to be passed explicitly here.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses. These losses are
+        usually expressed as a batch average, so for best results users need to
+        set `loss_reduction=SUM_OVER_BATCH_SIZE` when creating the head to avoid
+        scaling errors.
+
+    Returns:
+      A `model_fn._TPUEstimatorSpec` instance.
+
+    Raises:
+      ValueError: If both `train_op_fn` and `optimizer` are `None` in TRAIN
+        mode, or if both are set.
+    """
+    with ops.name_scope(self._name, 'head'):
+      # Predict.
+      predictions = self.predictions(logits)
+      if mode == ModeKeys.PREDICT:
+        keys = prediction_keys.PredictionKeys
+        regression_output = export_output.RegressionOutput(
+            value=predictions[keys.PREDICTIONS])
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.PREDICT,
+            predictions=predictions,
+            export_outputs={
+                base_head.DEFAULT_SERVING_KEY: regression_output,
+                base_head.REGRESS_SERVING_KEY: regression_output,
+                base_head.PREDICT_SERVING_KEY: export_output.PredictOutput(
+                    predictions)
+            })
+      regularized_training_loss = self.loss(
+          logits=logits,
+          labels=labels,
+          features=features,
+          mode=mode,
+          regularization_losses=regularization_losses)
+      # Eval.
+      if mode == ModeKeys.EVAL:
+        eval_metrics = self.metrics(regularization_losses=regularization_losses)
+        return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+            mode=ModeKeys.EVAL,
+            predictions=predictions,
+            loss=regularized_training_loss,
+            eval_metrics=base_head.create_eval_metrics_tuple(
+                self.update_metrics, {
+                    'eval_metrics': eval_metrics,
+                    'features': features,
+                    'logits': logits,
+                    'labels': labels,
+                    'regularization_losses': regularization_losses
+                }))
+      # Train.
+      train_op = base_head.create_estimator_spec_train_op(
+          head_name=self._name,
+          optimizer=optimizer,
+          train_op_fn=train_op_fn,
+          update_ops=update_ops,
+          trainable_variables=trainable_variables,
+          regularized_training_loss=regularized_training_loss,
+          loss_reduction=self._loss_reduction)
+    # Create summary.
+    base_head.create_estimator_spec_summary(
+        regularized_training_loss=regularized_training_loss,
+        regularization_losses=regularization_losses,
+        summary_key_fn=self._summary_key)
+    return model_fn._TPUEstimatorSpec(  # pylint: disable=protected-access
+        mode=ModeKeys.TRAIN,
+        predictions=predictions,
+        loss=regularized_training_loss,
+        train_op=train_op)
+
+
+@estimator_export('estimator.PoissonRegressionHead')
+class PoissonRegressionHead(RegressionHead):
+  """Creates a `Head` for poisson regression using `tf.nn.log_poisson_loss`.
+
+  The loss is the weighted sum over all input dimensions. Namely, if the input
+  labels have shape `[batch_size, label_dimension]`, the loss is the weighted
+  sum over both `batch_size` and `label_dimension`.
+
+  The head expects `logits` with shape `[D0, D1, ... DN, label_dimension]`.
+  In many applications, the shape is `[batch_size, label_dimension]`.
+
+  The `labels` shape must match `logits`, namely
+  `[D0, D1, ... DN, label_dimension]`. If `label_dimension=1`, shape
+  `[D0, D1, ... DN]` is also supported.
+
+  If `weight_column` is specified, weights must be of shape
+  `[D0, D1, ... DN]`, `[D0, D1, ... DN, 1]` or
+  `[D0, D1, ... DN, label_dimension]`.
+
+  This is implemented as a generalized linear model, see
+  https://en.wikipedia.org/wiki/Generalized_linear_model.
+
+  The head can be used with a canned estimator. Example:
+
+  ```python
+  my_head = tf.estimator.PoissonRegressionHead()
+  my_estimator = tf.estimator.DNNEstimator(
+      head=my_head,
+      hidden_units=...,
+      feature_columns=...)
+  ```
+
+  It can also be used with a custom `model_fn`. Example:
+
+  ```python
+  def _my_model_fn(features, labels, mode):
+    my_head = tf.estimator.PoissonRegressionHead()
+    logits = tf.keras.Model(...)(features)
+
+    return my_head.create_estimator_spec(
+        features=features,
+        mode=mode,
+        labels=labels,
+        optimizer=tf.keras.optimizers.Adagrad(lr=0.1),
+        logits=logits)
+
+  my_estimator = tf.estimator.Estimator(model_fn=_my_model_fn)
+  ```
+
+  Args:
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It is used to down weight or boost examples during training. It
+      will be multiplied by the loss of the example.
+    label_dimension: Number of regression labels per example. This is the size
+      of the last dimension of the labels `Tensor` (typically, this has shape
+      `[batch_size, label_dimension]`).
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Decides how to
+      reduce training loss over batch and label dimension. Defaults to
+      `SUM_OVER_BATCH_SIZE`, namely weighted sum of losses divided by
+      `batch_size * label_dimension`.
+    compute_full_loss: Whether to include the constant `log(z!)` term in
+      computing the Poisson loss. See `tf.nn.log_poisson_loss` for the full
+      documentation.
+    name: Name of the head. If provided, summary and metrics keys will be
+      suffixed by `"/" + name`. Also used as `name_scope` when creating ops.
+  """
+
+  def __init__(self,
+               label_dimension=1,
+               weight_column=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               compute_full_loss=True,
+               name=None):
+    self._compute_full_loss = compute_full_loss
+    super(PoissonRegressionHead, self).__init__(
+        label_dimension=label_dimension,
+        weight_column=weight_column,
+        loss_reduction=loss_reduction,
+        loss_fn=self._poisson_loss,
+        inverse_link_fn=tf.math.exp,
+        name=name)
+
+  def _poisson_loss(self, labels, logits):
+    return tf.nn.log_poisson_loss(
+        targets=labels,
+        log_input=logits,
+        compute_full_loss=self._compute_full_loss)
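+
+  # Editor's illustrative sketch (not part of the original module):
+  # `tf.nn.log_poisson_loss` computes `exp(log_input) - targets * log_input`,
+  # plus an approximation of the constant `log(targets!)` term when
+  # `compute_full_loss=True`, e.g.:
+  #
+  #   loss = tf.nn.log_poisson_loss(
+  #       targets=tf.constant([[2.]]), log_input=tf.constant([[0.5]]),
+  #       compute_full_loss=True)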
+
+
+@estimator_export('estimator.LogisticRegressionHead')
+class LogisticRegressionHead(RegressionHead):
+  """Creates a `Head` for logistic regression.
+
+  Uses `sigmoid_cross_entropy_with_logits` loss, which is the same as
+  `BinaryClassHead`. The differences compared to `BinaryClassHead` are:
+
+  * Does not support `label_vocabulary`. Instead, labels must be float in the
+    range [0, 1].
+  * Does not calculate some metrics that do not make sense, such as AUC.
+  * In `PREDICT` mode, only returns logits and predictions
+    (`=tf.sigmoid(logits)`), whereas `BinaryClassHead` also returns
+    probabilities, classes, and class_ids.
+  * Export output defaults to `RegressionOutput`, whereas `BinaryClassHead`
+    defaults to `PredictOutput`.
+
+  The head expects `logits` with shape `[D0, D1, ... DN, 1]`.
+  In many applications, the shape is `[batch_size, 1]`.
+
+  The `labels` shape must match `logits`, namely
+  `[D0, D1, ... DN]` or `[D0, D1, ... DN, 1]`.
+
+  If `weight_column` is specified, weights must be of shape
+  `[D0, D1, ... DN]` or `[D0, D1, ... DN, 1]`.
+
+  This is implemented as a generalized linear model, see
+  https://en.wikipedia.org/wiki/Generalized_linear_model.
+
+  The head can be used with a canned estimator. Example:
+
+  ```python
+  my_head = tf.estimator.LogisticRegressionHead()
+  my_estimator = tf.estimator.DNNEstimator(
+      head=my_head,
+      hidden_units=...,
+      feature_columns=...)
+  ```
+
+  It can also be used with a custom `model_fn`. Example:
+
+  ```python
+  def _my_model_fn(features, labels, mode):
+    my_head = tf.estimator.LogisticRegressionHead()
+    logits = tf.keras.Model(...)(features)
+
+    return my_head.create_estimator_spec(
+        features=features,
+        mode=mode,
+        labels=labels,
+        optimizer=tf.keras.optimizers.Adagrad(lr=0.1),
+        logits=logits)
+
+  my_estimator = tf.estimator.Estimator(model_fn=_my_model_fn)
+  ```
+
+  Args:
+    weight_column: A string or a `NumericColumn` created by
+      `tf.feature_column.numeric_column` defining feature column representing
+      weights. It is used to down weight or boost examples during training. It
+      will be multiplied by the loss of the example.
+    loss_reduction: One of `tf.losses.Reduction` except `NONE`. Decides how to
+      reduce training loss over batch and label dimension. Defaults to
+      `SUM_OVER_BATCH_SIZE`, namely weighted sum of losses divided by
+      `batch_size * label_dimension`.
+    name: Name of the head. If provided, summary and metrics keys will be
+      suffixed by `"/" + name`. Also used as `name_scope` when creating ops.
+  """
+
+  def _logistic_loss(self, labels, logits):
+    labels = base_head.check_label_range(
+        labels, n_classes=2, message='Labels must be in range [0, 1]')
+    return tf.compat.v1.nn.sigmoid_cross_entropy_with_logits(
+        labels=labels, logits=logits)
+
+  def __init__(self,
+               weight_column=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               name=None):
+    super(LogisticRegressionHead, self).__init__(
+        label_dimension=1,
+        weight_column=weight_column,
+        loss_reduction=loss_reduction,
+        loss_fn=self._logistic_loss,
+        inverse_link_fn=tf.math.sigmoid,
+        name=name)
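+
+
+# Editor's illustrative sketch (not part of the original module): unlike
+# `BinaryClassHead`, this head takes float labels in [0, 1], e.g. soft labels:
+#
+#   head = LogisticRegressionHead()
+#   loss = head.loss(
+#       labels=tf.constant([[0.8], [0.1]]),
+#       logits=tf.constant([[1.0], [-2.0]]))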
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/sequential_head.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/sequential_head.py
new file mode 100644
index 00000000..40f4fdb8
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/head/sequential_head.py
@@ -0,0 +1,494 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Defines a head for sequential models."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import abc
+
+import six
+import tensorflow as tf
+
+if six.PY3:
+  from collections.abc import Iterable
+else:
+  from collections import Iterable
+
+from tensorflow.python.framework import ops
+from tensorflow_estimator.python.estimator.head import base_head
+from tensorflow_estimator.python.estimator.head import multi_head
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+
+class _SequentialHead(base_head.Head):
+  """Interface for the head of a sequential model.
+
+  A sequential head handles input sequences of different lengths to compute the
+  output of a model. It requires a sequence mask tensor, to indicate which steps
+  of the sequences are padded and ensure proper aggregation for loss and metrics
+  computation. It has an `input_sequence_mask_key` property that specifies
+  which tensor in the features dictionary to use as the sequence mask tensor.
+
+  Such a head can, for instance, be used with `RNNEstimator` for sequential
+  predictions.
+
+  Example of usage:
+    ```python
+    def _my_model_fn(features, labels, mode, params, config=None):
+      feature_layer = tf.feature_column.SequenceFeatureLayer(columns)
+      input_layer, sequence_length = feature_layer(features)
+      sequence_length_mask = tf.sequence_mask(sequence_length)
+      rnn_layer = tf.keras.layers.RNN(cell=tf.keras.layers.SimpleRNNCell(units),
+                                      return_sequences=True)
+      logits = rnn_layer(input_layer, mask=sequence_length_mask)
+      features[sequential_head.input_sequence_mask_key] = sequence_length_mask
+      return sequential_head.create_estimator_spec(
+          features=features,
+          labels=labels,
+          mode=mode,
+          logits=logits,
+          optimizer=optimizer)
+    ```
+  """
+  __metaclass__ = abc.ABCMeta
+
+  @abc.abstractproperty
+  def input_sequence_mask_key(self):
+    """Key of the sequence mask tensor in the feature dictionary.
+
+    Returns:
+      A string.
+    """
+    raise NotImplementedError('Calling an abstract method.')
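+
+  # Editor's illustrative sketch (not part of the original module): a concrete
+  # subclass would typically expose its configured feature key, e.g.:
+  #
+  #   @property
+  #   def input_sequence_mask_key(self):
+  #     return self._sequence_length_mask  # e.g. 'sequence_length_mask'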
+
+
+class SequentialHeadWrapper(_SequentialHead):
+  """Sequential head wrapping a Head object.
+
+  Wraps a `Head` object and applies a sequential mask to:
+    - Loss aggregation: To only account for masked steps. Used for
+      `create_estimator_spec` and `loss` methods.
+    - Metrics: The sequence mask is used to only account for masked steps in
+      metrics computation with the `update_metrics` method.
+    - Predictions: To add a sequence length mask tensor to the predictions
+      dictionary.
+  """
+
+  def __init__(self,
+               static_head,
+               sequence_length_mask='sequence_length_mask',
+               feature_columns=None):
+    """Initializes a `SequentialHeadWrapper` instance.
+
+    Example of usage:
+      ```python
+      # Define a sequential head.
+      static_head = tf.estimator.BinaryClassHead(weight_column='weights')
+      sequential_head = head_lib.SequentialHeadWrapper(
+          static_head=static_head, sequence_length_mask='mask',
+          feature_columns='weights')
+
+      # Define feature columns and parsing spec.
+      feature_columns = [
+        tf.feature_column.sequence_numeric_column('sequential-feature')
+      ]
+      label_column = tf.feature_column.sequence_numeric_column(
+          'label', dtype=tf.int32),
+      weight_column = tf.feature_column.sequence_numeric_column('weights')
+      parsing_spec = tf.feature_column.make_parse_example_spec(
+          feature_columns + [label_column, weight_column])
+
+      # Use the head in a model function.
+      def _my_model_fn(features, labels, mode, params, config=None):
+        feature_layer = tf.feature_column.SequenceFeatureLayer(feature_columns)
+        input_layer, sequence_length = feature_layer(features)
+        sequence_length_mask = tf.sequence_mask(sequence_length)
+        rnn_layer = tf.keras.layers.RNN(
+            cell=tf.keras.layers.SimpleRNNCell(units),
+            return_sequences=True)
+        logits = rnn_layer(input_layer, mask=sequence_length_mask)
+        features['mask'] = sequence_length_mask
+        return sequential_head.create_estimator_spec(
+            features=features,
+            labels=labels,
+            mode=mode,
+            logits=logits,
+            optimizer=optimizer)
+      ```
+
+    Args:
+      static_head: `Head` object, static head to wrap.
+      sequence_length_mask: `str`, name of sequence length mask tensor in
+        features dictionary. Tensor must be a dense tensor of shape [batch_size,
+        seq_length].
+      feature_columns: `str` or list of `str`. Specifies the features of
+        the features dictionary to which the sequence length mask must be
+        applied, and which are passed to the static head's methods when calling
+        `create_estimator_spec`, `loss` or `update_metrics`. This is typically a
+        weight tensor.
+
+    Raises:
+      TypeError: If `sequence_length_mask` is not of string type.
+      TypeError: If provided features columns are not of string type.
+    """
+    # Verify and set sequence mask column.
+    # TODO(aarg): Add support for `NumericColumn`.
+    if not isinstance(sequence_length_mask, six.string_types):
+      raise TypeError('`sequence_length_mask` column must be a string. '
+                      'Given type: {}.'.format(type(sequence_length_mask)))
+    self._sequence_length_mask = sequence_length_mask
+
+    # Verify and set feature columns (to be flattened).
+    feature_columns = feature_columns or []
+    if not isinstance(feature_columns, Iterable):
+      raise TypeError('`feature_columns` must be either a string or an '
+                      'iterable of strings, got {} instead.'.format(
+                          type(feature_columns)))
+    if isinstance(feature_columns, six.string_types):
+      self._feature_columns = [feature_columns]
+    else:
+      self._feature_columns = feature_columns
+
+    for column in self._feature_columns:
+      # TODO(aarg): Add support for `NumericColumn` and `SequenceNumericColumn`.
+      if not isinstance(column, six.string_types):
+        raise TypeError('Column must be a string. Given type: {}.'.format(
+            type(column)))
+
+    # Set other variables.
+    if isinstance(static_head, multi_head.MultiHead):
+      # TODO(aarg): Add support for MultiHead.
+      raise ValueError(
+          '`MultiHead` is not supported with `SequentialHeadWrapper`.')
+    self._static_head = static_head
+
+    super(SequentialHeadWrapper, self).__init__()
+
+  def _flatten(self, labels, logits, features):
+    """Flattens labels, logits, and features tensors.
+
+    Provided tensors need to have at least two dimensions. The first two
+    dimensions of the provided tensors are flattened into a single dimension.
+    If a tensor is dense, the sequence mask in the features dictionary is used
+    to flatten it.
+
+    Note: If indices of a sparse tensor are not sorted, they will be reordered.
+
+    Args:
+      labels: `Tensor` or `SparseTensor` to flatten.
+      logits: `Tensor` or `SparseTensor` to flatten.
+      features: Dictionary of `Tensor` or `SparseTensor` objects to flatten.
+
+    Returns:
+      - Dense `Tensor` with flattened labels.
+      - Dense `Tensor` with flattened logits.
+      - Dictionary of flattened dense `Tensor` objects.
+
+    Raises:
+      ValueError: If the sequence mask is not found in `features`.
+      ValueError: If one of the provided tensors to flatten has fewer than two
+        dimensions.
+    """
+    # Retrieve sequence_mask from features dictionary.
+    if self.input_sequence_mask_key not in features:
+      raise ValueError('The provided sequence_length_mask key `{}` should be '
+                       'included in the features dictionary, but was not '
+                       'found. Found keys: {}.'.format(
+                           self.input_sequence_mask_key, list(features.keys())))
+    sequence_mask = features[self.input_sequence_mask_key]
+    if sequence_mask.get_shape().ndims != 2:
+      raise ValueError('Mask is expected to have two dimensions, got '
+                       '{} instead.'.format(sequence_mask.get_shape().ndims))
+
+    with ops.name_scope('flatten'):
+      expected_length = tf.math.reduce_sum(
+          tf.cast(sequence_mask, tf.dtypes.int32))
+      # Flatten logits and labels.
+      flat_logits = _flatten_tensor(logits, sequence_mask, expected_length)
+      flat_labels = _flatten_tensor(labels, sequence_mask, expected_length)
+
+      # Flatten features.
+      flat_features = {}
+      for column in self._feature_columns:
+        if column not in features:
+          raise ValueError('`{}` column expected in features '
+                           'dictionary.'.format(column))
+        flat_features[column] = _flatten_tensor(features[column], sequence_mask,
+                                                expected_length)
+
+      return flat_labels, flat_logits, flat_features
+
+  def loss(self,
+           logits,
+           labels,
+           features=None,
+           mode=None,
+           regularization_losses=None):
+    """Flattens input and returns regularized training loss.
+
+    Flattens `logits`, `labels`, and `features` tensors that are specified by
+    the head's `feature_columns` before calling the static head's `loss` method.
+
+    Args:
+      logits: Logits `Tensor` of rank >= 2 and shape [batch_size, seq_length,
+        D2, ... DN].
+      labels: Labels `Tensor` or `SparseTensor` of rank >= 2 and shape
+        [batch_size, seq_length, D2, ... DN].
+      features: Input `dict` mapping string feature names to `Tensor` or
+        `SparseTensor` objects containing the values for that feature in a
+        minibatch. Must contain the sequence length mask tensor. Features
+        corresponding to the sequential head's `feature_columns` are flattened
+        and passed to the static head's `loss` method.
+      mode: Estimator's `ModeKeys`. To be used in case loss calculation is
+        different in Train and Eval mode.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses.
+
+    Returns:
+      A scalar `Tensor` representing regularized training loss used in train and
+      eval.
+    """
+    flat_labels, flat_logits, flat_features = self._flatten(
+        labels, logits, features)
+    return self._static_head.loss(
+        logits=flat_logits,
+        labels=flat_labels,
+        features=flat_features,
+        mode=mode,
+        regularization_losses=regularization_losses)
+
+  def create_estimator_spec(self,
+                            features,
+                            mode,
+                            logits,
+                            labels=None,
+                            optimizer=None,
+                            trainable_variables=None,
+                            train_op_fn=None,
+                            update_ops=None,
+                            regularization_losses=None):
+    """Returns `EstimatorSpec` that a model_fn can return.
+
+    If in TRAIN or EVAL mode, `logits`, `labels`, and `features` tensors
+    corresponding to the head's `feature_columns` are flattened before calling
+    the static head's `create_estimator_spec` method.
+    If in PREDICT mode, no flattening is done. The `EstimatorSpec` is computed
+    using the static head's `create_estimator_spec` method. The sequence length
+    mask tensor is added to the predictions dictionary.
+
+    Args:
+      features: Input `dict` mapping string feature names to `Tensor` or
+        `SparseTensor` objects containing the values for that feature in a
+        minibatch. If in TRAIN or EVAL mode, only specified features are
+        flattened and passed to the static head's method.
+      mode: Estimator's `ModeKeys`.
+      logits: Logits `Tensor` of rank >= 2 and shape [batch_size, seq_length,
+        D2, ... DN].
+      labels: Labels `Tensor` or `SparseTensor` of rank >= 2 and shape
+        [batch_size, seq_length, D2, ... DN].
+      optimizer: A `tf.keras.optimizers.Optimizer` instance to optimize the
+        loss in TRAIN mode. Namely, sets
+        `train_op = optimizer.get_updates(loss, trainable_variables)`, which
+        updates variables to minimize `loss`.
+      trainable_variables: A list or tuple of `Variable` objects to update to
+        minimize `loss`. In Tensorflow 1.x, by default these are the list of
+        variables collected in the graph under the key
+        `GraphKeys.TRAINABLE_VARIABLES`. As Tensorflow 2.x doesn't have
+        collections and GraphKeys, trainable_variables need to be passed
+        explicitly here.
+      train_op_fn: Function that takes a scalar loss `Tensor` and returns an op
+        to optimize the model with the loss in TRAIN mode. Used if `optimizer`
+        is `None`. Exactly one of `train_op_fn` and `optimizer` must be set in
+        TRAIN mode. By default, it is `None` in other modes. If you want to
+        optimize loss yourself, you can pass `lambda _: tf.no_op()` and then
+        use `EstimatorSpec.loss` to compute and apply gradients.
+      update_ops: A list or tuple of update ops to be run at training time. For
+        example, layers such as BatchNormalization create mean and variance
+        update ops that need to be run at training time. In Tensorflow 1.x,
+        these are thrown into an UPDATE_OPS collection. As Tensorflow 2.x
+        doesn't have collections, update_ops need to be passed explicitly here.
+      regularization_losses: A list of additional scalar losses to be added to
+        the training loss, such as regularization losses.
+
+    Returns:
+      `EstimatorSpec`.
+    """
+    if mode == ModeKeys.PREDICT:
+      spec = self._static_head.create_estimator_spec(
+          features=features, mode=mode, logits=logits)
+      spec.predictions[self.input_sequence_mask_key] = features[
+          self.input_sequence_mask_key]
+      return spec._replace(predictions=spec.predictions)
+
+    flat_labels, flat_logits, flat_features = self._flatten(
+        labels, logits, features)
+
+    return self._static_head.create_estimator_spec(
+        features=flat_features,
+        mode=mode,
+        logits=flat_logits,
+        trainable_variables=trainable_variables,
+        labels=flat_labels,
+        optimizer=optimizer,
+        train_op_fn=train_op_fn,
+        regularization_losses=regularization_losses,
+        update_ops=update_ops)
+
+  def update_metrics(self,
+                     eval_metrics,
+                     features,
+                     logits,
+                     labels,
+                     regularization_losses=None):
+    """Updates metric objects and returns a `dict` of the updated metrics.
+
+    Flattens `logits`, `labels`, and `features` tensors that are specified by
+    the head's `feature_columns` before calling the static head's
+    `update_metrics` method.
+
+    Args:
+      eval_metrics: A `dict` of metrics to be updated.
+      features: Input `dict` mapping string feature names to `Tensor` or
+        `SparseTensor` objects containing the values for that feature in a
+        minibatch. Only specified features are flattened and passed to the
+        static head's method.
+      logits: Logits `Tensor` of rank >= 2 and shape [batch_size, seq_length,
+        D2, ... DN].
+      labels: Labels `Tensor` or `SparseTensor` of rank >= 2 and shape
+        [batch_size, seq_length, D2, ... DN].
+      regularization_losses: A list of additional scalar losses to be added to
+        the training and evaluation loss, such as regularization losses.
+
+    Returns:
+       A `dict` of updated metrics keyed by name. The value is an instance of
+       the `Metric` class.
+    """
+    flat_labels, flat_logits, flat_features = self._flatten(
+        labels, logits, features)
+    return self._static_head.update_metrics(
+        eval_metrics=eval_metrics,
+        features=flat_features,
+        logits=flat_logits,
+        labels=flat_labels,
+        regularization_losses=regularization_losses)
+
+  def _create_tpu_estimator_spec(self,
+                                 features,
+                                 mode,
+                                 logits,
+                                 labels=None,
+                                 optimizer=None,
+                                 trainable_variables=None,
+                                 train_op_fn=None,
+                                 update_ops=None,
+                                 regularization_losses=None):
+    raise NotImplementedError
+
+  def predictions(self, logits, keys=None):
+    """Calls the static head's `predictions` method."""
+    return self._static_head.predictions(logits, keys=keys)
+
+  def metrics(self, regularization_losses=None):
+    """Calls the static head's `metrics` method."""
+    return self._static_head.metrics(regularization_losses)
+
+  @property
+  def input_sequence_mask_key(self):
+    """Returns the key for the sequence mask feature."""
+    return self._sequence_length_mask
+
+  @property
+  def logits_dimension(self):
+    """Returns the logits dimension of the static head."""
+    return self._static_head.logits_dimension
+
+  @property
+  def loss_reduction(self):
+    """Returns the loss reduction of the static head."""
+    return self._static_head.loss_reduction
+
+  @property
+  def name(self):
+    """Returns the name of the static head."""
+    if self._static_head.name:
+      return '{}_sequential'.format(self._static_head.name)
+    return None
+
+  @property
+  def static_head(self):
+    """Returns the wrapped static head."""
+    return self._static_head
+
+
+def _flatten_tensor(tensor, sequence_mask, expected_length):
+  """Flattens the two first dimensions and reshapes a tensor or sparse tensor.
+
+  If `tensor` is a dense tensor, the sequence_mask is used to infer valid
+  inputs.
+
+  Note: If `tensor` is a `SparseTensor` and the indices are not sorted, they
+  will be reordered.
+
+  Args:
+    tensor: A `Tensor` or `SparseTensor` of dimension at least 2, of shape
+      [batch_size, seq_length, D0, D1, ..., DN].
+    sequence_mask: A boolean `Tensor` of shape [batch_size, seq_length].
+    expected_length: An integer scalar `Tensor` with the expected length of the
+      resulting flattened `Tensor`.
+
+  Returns:
+    A `Tensor` object of shape [expected_length, D0, D1, ..., DN].
+
+  Raises:
+    ValueError: If `tensor` has fewer than 2 dimensions.
+    ValueError: If `tensor` is not a `Tensor` or `SparseTensor` object.
+    InvalidArgumentError: If the resulting `Tensor` doesn't have the expected
+      length.
+  """
+  shape = tensor.get_shape()
+  if shape.ndims < 2:
+    raise ValueError('Input tensor expected to have at least 2 dimensions, '
+                     'got {} instead.'.format(shape.ndims))
+  if isinstance(tensor, tf.sparse.SparseTensor):
+    # What follows depends on the indices ordering. Hence we reorder the indices
+    # to ensure correctness.
+    flat_tensor = tf.sparse.reorder(tensor).values
+    if shape.ndims > 2:
+      new_shape = tf.concat([[-1], shape[2:]], axis=0)
+      # Reshape the reordered values (rather than raw `tensor.values`) so the
+      # result respects the corrected index ordering.
+      flat_tensor = tf.reshape(flat_tensor, new_shape)
+  elif isinstance(tensor, tf.Tensor):
+    flat_tensor = tf.boolean_mask(tensor, sequence_mask)
+  else:
+    raise ValueError('`tensor` expected to be a `Tensor` or a `SparseTensor`, '
+                     'got `{}` instead.'.format(tensor))
+  if shape.ndims == 2:
+    flat_tensor = tf.compat.v1.expand_dims(flat_tensor, -1)
+    expected_shape = tf.concat([[expected_length], [1]], axis=0)
+  else:
+    expected_shape = tf.concat([[expected_length], shape[2:]], axis=0)
+
+  # TODO(b/119617064): Unify eager and graph implementations.
+  err_message = 'Tensor shape is incompatible with provided mask.'
+  if tf.executing_eagerly():
+    if flat_tensor._shape_tuple() != tuple(expected_shape.numpy()):  # pylint: disable=protected-access
+      raise ValueError(err_message)
+    return flat_tensor
+  with tf.control_dependencies([
+      tf.compat.v1.debugging.assert_equal(
+          tf.compat.v1.shape(flat_tensor), expected_shape, message=err_message)
+  ]):
+    return tf.identity(flat_tensor)
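+
+
+# Hedged usage sketch (illustrative only, not part of the library API): for a
+# rank-2 dense tensor, `_flatten_tensor` keeps only the unpadded steps and
+# appends a trailing feature dimension.
+def _example_flatten_tensor():
+  labels = tf.constant([[1, 2, 0], [3, 0, 0]])  # Zeros are padded steps.
+  mask = tf.constant([[True, True, False], [True, False, False]])
+  expected_length = tf.math.reduce_sum(tf.cast(mask, tf.dtypes.int32))  # 3.
+  # Returns [[1], [2], [3]] with shape [expected_length, 1].
+  return _flatten_tensor(labels, mask, expected_length)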
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/basic_session_run_hooks.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/basic_session_run_hooks.py
new file mode 100644
index 00000000..e305c9d3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/basic_session_run_hooks.py
@@ -0,0 +1,49 @@
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Some common SessionRunHook classes."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from tensorflow.python.training.basic_session_run_hooks import CheckpointSaverHook
+from tensorflow.python.training.basic_session_run_hooks import CheckpointSaverListener
+from tensorflow.python.training.basic_session_run_hooks import FeedFnHook
+from tensorflow.python.training.basic_session_run_hooks import FinalOpsHook
+from tensorflow.python.training.basic_session_run_hooks import GlobalStepWaiterHook
+from tensorflow.python.training.basic_session_run_hooks import LoggingTensorHook
+from tensorflow.python.training.basic_session_run_hooks import NanLossDuringTrainingError
+from tensorflow.python.training.basic_session_run_hooks import NanTensorHook
+from tensorflow.python.training.basic_session_run_hooks import ProfilerHook
+from tensorflow.python.training.basic_session_run_hooks import SecondOrStepTimer
+from tensorflow.python.training.basic_session_run_hooks import StepCounterHook
+from tensorflow.python.training.basic_session_run_hooks import StopAtStepHook
+from tensorflow.python.training.basic_session_run_hooks import SummarySaverHook
+from tensorflow.python.util.tf_export import estimator_export
+
+estimator_export("estimator.SecondOrStepTimer")(SecondOrStepTimer)
+estimator_export("estimator.LoggingTensorHook")(LoggingTensorHook)
+estimator_export("estimator.StopAtStepHook")(StopAtStepHook)
+estimator_export("estimator.CheckpointSaverListener")(CheckpointSaverListener)
+estimator_export("estimator.CheckpointSaverHook")(CheckpointSaverHook)
+estimator_export("estimator.StepCounterHook")(StepCounterHook)
+estimator_export("estimator.NanLossDuringTrainingError")(
+    NanLossDuringTrainingError)
+estimator_export("estimator.NanTensorHook")(NanTensorHook)
+estimator_export("estimator.SummarySaverHook")(SummarySaverHook)
+estimator_export("estimator.GlobalStepWaiterHook")(GlobalStepWaiterHook)
+estimator_export("estimator.FinalOpsHook")(FinalOpsHook)
+estimator_export("estimator.FeedFnHook")(FeedFnHook)
+estimator_export("estimator.ProfilerHook")(ProfilerHook)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/fake_summary_writer.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/fake_summary_writer.py
new file mode 100644
index 00000000..c04755ae
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/fake_summary_writer.py
@@ -0,0 +1,143 @@
+# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Fake summary writer for unit tests."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from tensorflow.core.framework import summary_pb2
+from tensorflow.python.framework import test_util
+from tensorflow.python.summary.writer import writer
+from tensorflow.python.summary.writer import writer_cache
+
+
+# TODO(ptucker): Replace with mock framework.
+class FakeSummaryWriter(object):
+  """Fake summary writer."""
+
+  _replaced_summary_writer = None
+
+  @classmethod
+  def install(cls):
+    if cls._replaced_summary_writer:
+      raise ValueError('FakeSummaryWriter already installed.')
+    cls._replaced_summary_writer = writer.FileWriter
+    writer.FileWriter = FakeSummaryWriter
+    writer_cache.FileWriter = FakeSummaryWriter
+
+  @classmethod
+  def uninstall(cls):
+    if not cls._replaced_summary_writer:
+      raise ValueError('FakeSummaryWriter not installed.')
+    writer.FileWriter = cls._replaced_summary_writer
+    writer_cache.FileWriter = cls._replaced_summary_writer
+    cls._replaced_summary_writer = None
+
+  def __init__(self, logdir, graph=None):
+    self._logdir = logdir
+    self._graph = graph
+    self._summaries = {}
+    self._added_graphs = []
+    self._added_meta_graphs = []
+    self._added_session_logs = []
+    self._added_run_metadata = {}
+
+  @property
+  def summaries(self):
+    return self._summaries
+
+  def assert_summaries(self,
+                       test_case,
+                       expected_logdir=None,
+                       expected_graph=None,
+                       expected_summaries=None,
+                       expected_added_graphs=None,
+                       expected_added_meta_graphs=None,
+                       expected_session_logs=None):
+    """Assert expected items have been added to summary writer."""
+    if expected_logdir is not None:
+      test_case.assertEqual(expected_logdir, self._logdir)
+    if expected_graph is not None:
+      test_case.assertTrue(expected_graph is self._graph)
+    expected_summaries = expected_summaries or {}
+    for step in expected_summaries:
+      test_case.assertTrue(
+          step in self._summaries,
+          msg='Missing step %s from %s.' % (step, self._summaries.keys()))
+      actual_simple_values = {}
+      for step_summary in self._summaries[step]:
+        for v in step_summary.value:
+          # Ignore global_step/sec since it's written by Supervisor in a
+          # separate thread, so it's non-deterministic how many get written.
+          if 'global_step/sec' != v.tag:
+            actual_simple_values[v.tag] = v.simple_value
+      test_case.assertEqual(expected_summaries[step], actual_simple_values)
+    if expected_added_graphs is not None:
+      test_case.assertEqual(expected_added_graphs, self._added_graphs)
+    if expected_added_meta_graphs is not None:
+      test_case.assertEqual(
+          len(expected_added_meta_graphs), len(self._added_meta_graphs))
+      for expected, actual in zip(expected_added_meta_graphs,
+                                  self._added_meta_graphs):
+        test_util.assert_meta_graph_protos_equal(test_case, expected, actual)
+    if expected_session_logs is not None:
+      test_case.assertEqual(expected_session_logs, self._added_session_logs)
+
+  def add_summary(self, summ, current_global_step):
+    """Add summary."""
+    if isinstance(summ, bytes):
+      summary_proto = summary_pb2.Summary()
+      summary_proto.ParseFromString(summ)
+      summ = summary_proto
+    if current_global_step in self._summaries:
+      step_summaries = self._summaries[current_global_step]
+    else:
+      step_summaries = []
+      self._summaries[current_global_step] = step_summaries
+    step_summaries.append(summ)
+
+  # NOTE: Ignore global_step since its value is non-deterministic.
+  def add_graph(self, graph, global_step=None, graph_def=None):
+    """Add graph."""
+    if (global_step is not None) and (global_step < 0):
+      raise ValueError('Invalid global_step %s.' % global_step)
+    if graph_def is not None:
+      raise ValueError('Unexpected graph_def %s.' % graph_def)
+    self._added_graphs.append(graph)
+
+  def add_meta_graph(self, meta_graph_def, global_step=None):
+    """Add metagraph."""
+    if (global_step is not None) and (global_step < 0):
+      raise ValueError('Invalid global_step %s.' % global_step)
+    self._added_meta_graphs.append(meta_graph_def)
+
+  # NOTE: Ignore global_step since its value is non-deterministic.
+  def add_session_log(self, session_log, global_step=None):
+    # pylint: disable=unused-argument
+    self._added_session_logs.append(session_log)
+
+  def add_run_metadata(self, run_metadata, tag, global_step=None):
+    if (global_step is not None) and (global_step < 0):
+      raise ValueError('Invalid global_step %s.' % global_step)
+    self._added_run_metadata[tag] = run_metadata
+
+  def flush(self):
+    pass
+
+  def reopen(self):
+    pass
+
+  def close(self):
+    pass
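+
+
+# Hedged usage sketch (illustrative only; `run_test_body` is a hypothetical
+# stand-in for the code under test): install the fake writer so summaries are
+# captured in memory, and always restore the real `FileWriter` afterwards.
+def _example_fake_summary_writer(run_test_body):
+  FakeSummaryWriter.install()
+  try:
+    run_test_body()
+  finally:
+    FakeSummaryWriter.uninstall()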
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/hooks.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/hooks.py
new file mode 100644
index 00000000..cda7c988
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/hooks.py
@@ -0,0 +1,283 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Some useful session run hooks."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+import time
+import tensorflow as tf
+from tensorflow.python.training import training_util
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator as estimator_lib
+
+
+# pylint: disable=protected-access
+@estimator_export('estimator.experimental.InMemoryEvaluatorHook')
+class InMemoryEvaluatorHook(tf.compat.v1.train.SessionRunHook):
+  """Hook to run evaluation in training without a checkpoint.
+
+  Example:
+
+  ```python
+  def train_input_fn():
+    ...
+    return train_dataset
+
+  def eval_input_fn():
+    ...
+    return eval_dataset
+
+  estimator = tf.estimator.DNNClassifier(...)
+
+  evaluator = tf.estimator.experimental.InMemoryEvaluatorHook(
+      estimator, eval_input_fn)
+  estimator.train(train_input_fn, hooks=[evaluator])
+  ```
+
+  Current limitations of this approach are:
+
+  * It doesn't support multi-node distributed mode.
+  * It doesn't support saveable objects other than variables (such as boosted
+    tree support)
+  * It doesn't support custom saver logic (such as ExponentialMovingAverage
+    support)
+
+  """
+
+  def __init__(self,
+               estimator,
+               input_fn,
+               steps=None,
+               hooks=None,
+               name=None,
+               every_n_iter=100):
+    """Initializes a `InMemoryEvaluatorHook`.
+
+    Args:
+      estimator: A `tf.estimator.Estimator` instance on which to call evaluate.
+      input_fn:  Equivalent to the `input_fn` arg to `estimator.evaluate`. A
+        function that constructs the input data for evaluation. See [Creating
+        input functions](
+        https://tensorflow.org/guide/premade_estimators#create_input_functions)
+          for more information. The function should construct and return one of
+        the following:
+          * A 'tf.data.Dataset' object: Outputs of `Dataset` object must be a
+            tuple (features, labels) with same constraints as below.
+          * A tuple (features, labels): Where `features` is a `Tensor` or a
+            dictionary of string feature name to `Tensor` and `labels` is a
+            `Tensor` or a dictionary of string label name to `Tensor`. Both
+            `features` and `labels` are consumed by `model_fn`. They should
+            satisfy the expectation of `model_fn` from inputs.
+      steps: Equivalent to the `steps` arg to `estimator.evaluate`.  Number of
+        steps for which to evaluate model. If `None`, evaluates until `input_fn`
+        raises an end-of-input exception.
+      hooks: Equivalent to the `hooks` arg to `estimator.evaluate`. List of
+        `SessionRunHook` subclass instances. Used for callbacks inside the
+        evaluation call.
+      name:  Equivalent to the `name` arg to `estimator.evaluate`. Name of the
+        evaluation if user needs to run multiple evaluations on different data
+        sets, such as on training data vs test data. Metrics for different
+        evaluations are saved in separate folders, and appear separately in
+        tensorboard.
+      every_n_iter: `int`, runs the evaluator once every N training iterations.
+
+    Raises:
+      ValueError: if `every_n_iter` is non-positive or this is not a
+        single-machine training setup.
+    """
+    if every_n_iter is None or every_n_iter <= 0:
+      raise ValueError('invalid every_n_iter=%s.' % every_n_iter)
+    if (estimator.config.num_ps_replicas > 0 or
+        estimator.config.num_worker_replicas > 1):
+      raise ValueError(
+          'InMemoryEvaluator supports only single machine (aka Local) setting.')
+    self._estimator = estimator
+    self._input_fn = input_fn
+    self._steps = steps
+    self._name = name
+    self._every_n_iter = every_n_iter
+    self._eval_dir = os.path.join(self._estimator.model_dir,
+                                  'eval' if not name else 'eval_' + name)
+
+    self._graph = None
+    self._hooks = estimator_lib._check_hooks_type(hooks)
+    self._hooks.extend(self._estimator._convert_eval_steps_to_hooks(steps))
+    self._timer = tf.compat.v1.train.SecondOrStepTimer(every_steps=every_n_iter)
+
+  def begin(self):
+    """Build eval graph and restoring op."""
+    self._timer.reset()
+    self._iter_count = 0
+    self._graph = tf.Graph()
+    with self._graph.as_default():
+      (self._scaffold, self._update_op, self._eval_dict,
+       self._all_hooks) = self._estimator._evaluate_build_graph(
+           self._input_fn, self._hooks, checkpoint_path=None)
+
+      if self._scaffold.saver is not None:
+        raise ValueError('InMemoryEvaluator does not support custom saver')
+      if self._scaffold.init_fn is not None:
+        raise ValueError('InMemoryEvaluator does not support custom init_fn')
+
+      self._var_name_to_eval_var = {
+          v.name: v for v in tf.compat.v1.get_collection(
+              tf.compat.v1.GraphKeys.GLOBAL_VARIABLES)
+      }
+      self._var_name_to_placeholder = {
+          v.name: tf.compat.v1.placeholder(v.dtype) for v in
+          tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.GLOBAL_VARIABLES)
+      }
+
+  def after_create_session(self, session, coord):  # pylint: disable=unused-argument
+    """Does first run which shows the eval metrics before training."""
+    if tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.SAVEABLE_OBJECTS):
+      raise ValueError(
+          'InMemoryEvaluator does not support saveables other than global '
+          'variables.')
+    self._var_name_to_train_var = {
+        v.name: v for v in tf.compat.v1.get_collection(
+            tf.compat.v1.GraphKeys.GLOBAL_VARIABLES)
+    }
+    var_names_to_transfer = set(self._var_name_to_placeholder.keys()) & set(
+        self._var_name_to_train_var.keys())
+    # Filter out training var names that do not exist in evaluation.
+    self._var_name_to_train_var = {
+        v_name: self._var_name_to_train_var[v_name]
+        for v_name in var_names_to_transfer
+    }
+    # Filter out eval var names that do not exist in training.
+    self._var_name_to_eval_var = {
+        v_name: self._var_name_to_eval_var[v_name]
+        for v_name in var_names_to_transfer
+    }
+
+    with self._graph.as_default():
+      self._var_feed_op = tf.group([
+          tf.compat.v1.assign(self._var_name_to_eval_var[v_name],
+                              self._var_name_to_placeholder[v_name])
+          for v_name in var_names_to_transfer
+      ])
+
+    self._evaluate(session)
+
+  def _evaluate(self, train_session):
+    var_name_to_value = train_session.run(self._var_name_to_train_var)
+    placeholder_to_value = {
+        self._var_name_to_placeholder[v_name]: var_name_to_value[v_name]
+        for v_name in var_name_to_value
+    }
+
+    def feed_variables(scaffold, session):
+      del scaffold
+      session.run(self._var_feed_op, feed_dict=placeholder_to_value)
+
+    scaffold = tf.compat.v1.train.Scaffold(
+        init_fn=feed_variables, copy_from_scaffold=self._scaffold)
+
+    with self._graph.as_default():
+      self._estimator._evaluate_run(
+          checkpoint_path=None,
+          scaffold=scaffold,
+          update_op=self._update_op,
+          eval_dict=self._eval_dict,
+          all_hooks=self._all_hooks,
+          output_dir=self._eval_dir)
+
+    self._timer.update_last_triggered_step(self._iter_count)
+
+  def after_run(self, run_context, run_values):  # pylint: disable=unused-argument
+    """Runs evaluator."""
+    self._iter_count += 1
+    if self._timer.should_trigger_for_step(self._iter_count):
+      self._evaluate(run_context.session)
+
+  def end(self, session):  # pylint: disable=unused-argument
+    """Runs evaluator for final model."""
+    self._evaluate(session)
+
+
+class _StopAtCheckpointStepHook(tf.compat.v1.train.SessionRunHook):
+  """Hook that requests stop at a specified step based on checkpoint.
+
+  Note: We recommend using `make_stop_at_checkpoint_step_hook` to get the
+  proper hook.
+  """
+
+  def __init__(self, model_dir, last_step, wait_after_file_check_secs=30):
+    """Initializes a `StopAtCheckpointStepHook`.
+
+    This hook requests stop after the last step has been reached. It checks the
+    latest checkpoint to verify whether the last step has been written to disk.
+
+    Args:
+      model_dir: Directory to read global step from latest checkpoint.
+      last_step: Step after which to stop.
+      wait_after_file_check_secs: Many workers reading the same file may create
+        I/O issues. To throttle that, we wait the given number of seconds after
+        each read of the file.
+
+    Raises:
+      ValueError: If one of the arguments is invalid.
+    """
+    if last_step is None:
+      raise ValueError('last_step must be specified.')
+    if model_dir is None:
+      raise ValueError('model_dir must be specified.')
+
+    self._model_dir = model_dir
+    self._last_step = last_step
+    self._wait_after_file_check_secs = wait_after_file_check_secs
+
+  def begin(self):
+    self._global_step_tensor = training_util._get_or_create_global_step_read()  # pylint: disable=protected-access
+    if self._global_step_tensor is None:
+      raise RuntimeError(
+          'Global step should be created to use StopAtCheckpointStepHook.')
+
+  def before_run(self, run_context):  # pylint: disable=unused-argument
+    return tf.compat.v1.train.SessionRunArgs(self._global_step_tensor)
+
+  def after_run(self, run_context, run_values):
+    global_step = run_values.results + 1
+    if global_step >= self._last_step:
+      # Check latest global step in the checkpoint to ensure that the targeted
+      # last step is written on disk.
+
+      step = estimator_lib._load_global_step_from_checkpoint_dir(
+          self._model_dir)
+      if step >= self._last_step:
+        run_context.request_stop()
+      else:
+        time.sleep(self._wait_after_file_check_secs)
+
+
+@estimator_export('estimator.experimental.make_stop_at_checkpoint_step_hook')
+def make_stop_at_checkpoint_step_hook(estimator,
+                                      last_step,
+                                      wait_after_file_check_secs=30):
+  """Creates a proper StopAtCheckpointStepHook based on chief status."""
+
+  if estimator.config.is_chief:
+    return tf.compat.v1.train.StopAtStepHook(last_step=last_step)
+  return _StopAtCheckpointStepHook(
+      model_dir=estimator.model_dir,
+      last_step=last_step,
+      wait_after_file_check_secs=wait_after_file_check_secs)
+
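+
+# Hedged usage sketch (illustrative only; `estimator` and `train_input_fn` are
+# assumed to exist): the chief stops via a plain `StopAtStepHook`, while
+# non-chief workers poll the checkpoint directory for the final global step.
+def _example_stop_at_checkpoint(estimator, train_input_fn):
+  hook = make_stop_at_checkpoint_step_hook(estimator, last_step=10000)
+  estimator.train(input_fn=train_input_fn, hooks=[hook])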
+
+# pylint: enable=protected-access
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/session_run_hook.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/session_run_hook.py
new file mode 100644
index 00000000..5d92948a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/hooks/session_run_hook.py
@@ -0,0 +1,101 @@
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""A SessionRunHook extends `session.run()` calls for the `MonitoredSession`.
+
+SessionRunHooks are useful to track training, report progress, request early
+stopping and more. SessionRunHooks use the observer pattern and notify at the
+following points:
+ - when a session starts being used
+ - before a call to the `session.run()`
+ - after a call to the `session.run()`
+ - when the session is closed
+
+A SessionRunHook encapsulates a piece of reusable/composable computation that
+can piggyback a call to `MonitoredSession.run()`. A hook can add any
+ops, tensors, or feeds to the run call and, when the run call finishes
+successfully, gets the outputs it requested. Hooks are allowed to add ops to
+the graph in `hook.begin()`. The graph is finalized after the `begin()` method
+is called.
+
+There are a few pre-defined hooks:
+ - StopAtStepHook: Request stop based on global_step
+ - CheckpointSaverHook: saves checkpoint
+ - LoggingTensorHook: outputs one or more tensor values to log
+ - NanTensorHook: Request stop if given `Tensor` contains Nans.
+ - SummarySaverHook: saves summaries to a summary writer
+
+For more specific needs, you can create custom hooks:
+  class ExampleHook(SessionRunHook):
+    def begin(self):
+      # You can add ops to the graph here.
+      print('Starting the session.')
+      self.your_tensor = ...
+
+    def after_create_session(self, session, coord):
+      # When this is called, the graph is finalized and
+      # ops can no longer be added to the graph.
+      print('Session created.')
+
+    def before_run(self, run_context):
+      print('Before calling session.run().')
+      return SessionRunArgs(self.your_tensor)
+
+    def after_run(self, run_context, run_values):
+      print('Done running one step. The value of my tensor: %s',
+            run_values.results)
+      if you-need-to-stop-loop:
+        run_context.request_stop()
+
+    def end(self, session):
+      print('Done with the session.')
+
+To understand how hooks interact with calls to `MonitoredSession.run()`,
+look at following code:
+  with MonitoredTrainingSession(hooks=your_hooks, ...) as sess:
+    while not sess.should_stop():
+      sess.run(your_fetches)
+
+Above user code leads to following execution:
+  call hooks.begin()
+  sess = tf.Session()
+  call hooks.after_create_session()
+  while not stop is requested:
+    call hooks.before_run()
+    try:
+      results = sess.run(merged_fetches, feed_dict=merged_feeds)
+    except (errors.OutOfRangeError, StopIteration):
+      break
+    call hooks.after_run()
+  call hooks.end()
+  sess.close()
+
+Note that if sess.run() raises OutOfRangeError or StopIteration then
+hooks.after_run() will not be called but hooks.end() will still be called.
+If sess.run() raises any other exception then neither hooks.after_run() nor
+hooks.end() will be called.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from tensorflow.python.training.session_run_hook import SessionRunArgs
+from tensorflow.python.training.session_run_hook import SessionRunContext
+from tensorflow.python.training.session_run_hook import SessionRunHook
+from tensorflow.python.training.session_run_hook import SessionRunValues
+from tensorflow.python.util.tf_export import estimator_export
+
+estimator_export("estimator.SessionRunHook")(SessionRunHook)
+estimator_export("estimator.SessionRunArgs")(SessionRunArgs)
+estimator_export("estimator.SessionRunContext")(SessionRunContext)
+estimator_export("estimator.SessionRunValues")(SessionRunValues)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/inputs.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/inputs.py
new file mode 100644
index 00000000..c5a52547
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/inputs.py
@@ -0,0 +1,25 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Utility methods to create simple input_fns."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+# pylint: disable=unused-import,line-too-long
+from tensorflow_estimator.python.estimator.inputs.numpy_io import numpy_input_fn
+from tensorflow_estimator.python.estimator.inputs.pandas_io import pandas_input_fn
+
+# pylint: enable=unused-import,line-too-long
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/numpy_io.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/numpy_io.py
new file mode 100644
index 00000000..31871b10
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/numpy_io.py
@@ -0,0 +1,224 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Methods to allow dict of numpy arrays."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+
+import numpy as np
+from six import string_types
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator.inputs.queues import feeding_functions
+
+# Key name to pack the target into dict of `features`. See
+# `_get_unique_target_key` for details.
+_TARGET_KEY = '__target_key__'
+
+
+def _get_unique_target_key(features):
+  """Returns a key not existed in the input dict `features`.
+
+  Caller of `input_fn` usually provides `features` (dict of numpy arrays) and
+  `target`, but the underlying feeding module expects a single dict of numpy
+  arrays as input. So, the `target` needs to be packed into the `features`
+  temporarily and unpacked after calling the feeding function. To that end,
+  this function returns a key that does not exist in `features`, under which
+  the `target` can be packed.
+
+  Args:
+    features: OrderedDict of numpy arrays
+
+  Returns:
+    A unique key that can be used to insert the subsequent target into
+      features dict.
+  """
+  target_key = _TARGET_KEY
+  while target_key in features:
+    target_key += '_n'
+  return target_key
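+
+
+# Hedged example (illustrative only): on a collision with the default key,
+# '_n' is appended until the key is unique.
+def _example_unique_target_key():
+  features = {'__target_key__': None}
+  return _get_unique_target_key(features)  # -> '__target_key___n'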
+
+
+def _validate_and_convert_features(x):
+  """Type check input data and make a shadow copy as an ordered dict.
+
+  Args:
+    x: numpy array object or dict of numpy array objects. If an array, the array
+      will be treated as a single feature.
+
+  Returns:
+    OrderedDict copy of x.
+
+  Raises:
+    ValueError: if x is empty
+    TypeError: if x is an unknown type.
+  """
+  if isinstance(x, dict):
+    if not x:
+      raise ValueError('x cannot be an empty dict')
+    # Make a shallow copy and also ensure the order of iteration is consistent.
+    ordered_dict_data = collections.OrderedDict(
+        sorted(x.items(), key=lambda t: t[0]))
+  elif isinstance(x, np.ndarray):
+    if x.size == 0:
+      raise ValueError('x cannot be an empty array')
+
+    # Make a shallow copy and convert to dict to align with dict processing.
+    ordered_dict_data = collections.OrderedDict({'__direct_np_input__': x})
+  else:
+    x_type = type(x).__name__
+    raise TypeError('x must be a dict or array; got {}'.format(x_type))
+
+  return ordered_dict_data
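+
+
+# Hedged example (illustrative only): a bare array is wrapped under the
+# reserved '__direct_np_input__' key, and dict inputs are copied with sorted
+# keys so iteration order is deterministic.
+def _example_validate_and_convert():
+  single = _validate_and_convert_features(np.arange(3))
+  # -> OrderedDict([('__direct_np_input__', array([0, 1, 2]))])
+  pair = _validate_and_convert_features({'b': np.arange(3), 'a': np.arange(3)})
+  # -> keys in sorted order: ['a', 'b']
+  return single, pair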
+
+
+@estimator_export(v1=['estimator.inputs.numpy_input_fn'])
+def numpy_input_fn(x,
+                   y=None,
+                   batch_size=128,
+                   num_epochs=1,
+                   shuffle=None,
+                   queue_capacity=1000,
+                   num_threads=1):
+  """Returns input function that would feed dict of numpy arrays into the model.
+
+  This returns a function outputting `features` and `targets` based on the dict
+  of numpy arrays. The dict `features` has the same keys as `x`. The dict
+  `targets` has the same keys as `y` if `y` is a dict.
+
+  Example:
+
+  ```python
+  age = np.arange(4) * 1.0
+  height = np.arange(32, 36)
+  x = {'age': age, 'height': height}
+  y = np.arange(-32, -28)
+
+  with tf.Session() as session:
+    input_fn = numpy_io.numpy_input_fn(
+        x, y, batch_size=2, shuffle=False, num_epochs=1)
+  ```
+
+  Args:
+    x: numpy array object or dict of numpy array objects. If an array, the array
+      will be treated as a single feature.
+    y: numpy array object or dict of numpy array object. `None` if absent.
+    batch_size: Integer, size of batches to return.
+    num_epochs: Integer, number of epochs to iterate over data. If `None` will
+      run forever.
+    shuffle: Boolean, if True shuffles the queue. Avoid shuffle at prediction
+      time.
+    queue_capacity: Integer, size of queue to accumulate.
+    num_threads: Integer, number of threads used for reading and enqueueing. In
+      order to have predictable and repeatable order of reading and enqueueing,
+      such as in prediction and evaluation mode, `num_threads` should be 1.
+
+  Returns:
+    Function, that has signature of ()->(dict of `features`, `targets`)
+
+  Raises:
+    ValueError: if the shape of `y` mismatches the shape of values in `x`
+      (i.e., all values in `x` must have the same length).
+    ValueError: if duplicate keys are in both `x` and `y` when `y` is a dict.
+    ValueError: if x or y is an empty dict.
+    TypeError: `x` is not a dict or array.
+    ValueError: if `shuffle` is not provided or is not a bool.
+  """
+  if not isinstance(shuffle, bool):
+    raise ValueError('shuffle must be provided and explicitly set as boolean '
+                     '(it is recommended to set it as True for training); '
+                     'got {}'.format(shuffle))
+
+  def input_fn():
+    """Numpy input function."""
+
+    # Note that `x` should not be used after conversion to ordered_dict_data,
+    # as type could be either dict or array.
+    ordered_dict_data = _validate_and_convert_features(x)
+
+    # Copy the keys; `dict.keys()` returns a view in Python 3.
+    feature_keys = list(ordered_dict_data.keys())
+
+    if y is None:
+      target_keys = None
+    elif isinstance(y, dict):
+      if not y:
+        raise ValueError('y cannot be empty dict, use None instead.')
+
+      ordered_dict_y = collections.OrderedDict(
+          sorted(y.items(), key=lambda t: t[0]))
+      target_keys = list(ordered_dict_y.keys())
+
+      duplicate_keys = set(feature_keys).intersection(set(target_keys))
+      if duplicate_keys:
+        raise ValueError('{} duplicate keys are found in both x and y: '
+                         '{}'.format(len(duplicate_keys), duplicate_keys))
+
+      ordered_dict_data.update(ordered_dict_y)
+    else:
+      target_keys = _get_unique_target_key(ordered_dict_data)
+      ordered_dict_data[target_keys] = y
+
+    if len(set(v.shape[0] for v in ordered_dict_data.values())) != 1:
+      shape_dict_of_x = {k: ordered_dict_data[k].shape for k in feature_keys}
+
+      if target_keys is None:
+        shape_of_y = None
+      elif isinstance(target_keys, string_types):
+        shape_of_y = y.shape
+      else:
+        shape_of_y = {k: ordered_dict_data[k].shape for k in target_keys}
+
+      raise ValueError('Length of tensors in x and y is mismatched. All '
+                       'elements in x and y must have the same length.\n'
+                       'Shapes in x: {}\n'
+                       'Shapes in y: {}\n'.format(shape_dict_of_x, shape_of_y))
+
+    queue = feeding_functions._enqueue_data(  # pylint: disable=protected-access
+        ordered_dict_data,
+        queue_capacity,
+        shuffle=shuffle,
+        num_threads=num_threads,
+        enqueue_size=batch_size,
+        num_epochs=num_epochs)
+
+    batch = (
+        queue.dequeue_many(batch_size)
+        if num_epochs is None else queue.dequeue_up_to(batch_size))
+
+    # Remove the first `Tensor` in `batch`, which is the row number.
+    if batch:
+      batch.pop(0)
+
+    if isinstance(x, np.ndarray):
+      # Return as the same type as original array.
+      features = batch[0]
+    else:
+      # Return as the original dict type
+      features = dict(zip(feature_keys, batch[:len(feature_keys)]))
+
+    if target_keys is None:
+      # TODO(martinwicke), return consistent result
+      return features
+    elif isinstance(target_keys, string_types):
+      target = batch[-1]
+      return features, target
+    else:
+      target = dict(zip(target_keys, batch[-len(target_keys):]))
+      return features, target
+
+  return input_fn
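+
+
+# Hedged usage sketch (illustrative only; `estimator` is assumed to be a
+# pre-built `tf.estimator.Estimator`): the returned callable is handed to the
+# Estimator, which invokes it to build the queue-based input pipeline.
+def _example_numpy_input_fn(estimator):
+  train_input_fn = numpy_input_fn(
+      x={'age': np.arange(4) * 1.0, 'height': np.arange(32, 36)},
+      y=np.arange(-32, -28),
+      batch_size=2,
+      shuffle=True,
+      num_epochs=None)
+  estimator.train(input_fn=train_input_fn, steps=100)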
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/pandas_io.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/pandas_io.py
new file mode 100644
index 00000000..d7b5febb
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/pandas_io.py
@@ -0,0 +1,158 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Methods to allow pandas.DataFrame."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import uuid
+import numpy as np
+import six
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator.inputs.queues import feeding_functions
+
+try:
+  # pylint: disable=g-import-not-at-top
+  # pylint: disable=unused-import
+  import pandas as pd
+  HAS_PANDAS = True
+except IOError:
+  # Pandas writes a temporary file during import. If it fails, don't use pandas.
+  HAS_PANDAS = False
+except ImportError:
+  HAS_PANDAS = False
+
+
+def _get_unique_target_key(features, target_column_name):
+  """Returns a key that does not exist in the input DataFrame `features`.
+
+  Args:
+    features: DataFrame
+    target_column_name: Name of the target column as a `str`
+
+  Returns:
+    A unique key that can be used to insert the target into
+      features.
+  """
+  if target_column_name in features:
+    target_column_name += '_' + str(uuid.uuid4())
+  return target_column_name
+
+
+@estimator_export(v1=['estimator.inputs.pandas_input_fn'])
+def pandas_input_fn(x,
+                    y=None,
+                    batch_size=128,
+                    num_epochs=1,
+                    shuffle=None,
+                    queue_capacity=1000,
+                    num_threads=1,
+                    target_column='target'):
+  """Returns input function that would feed Pandas DataFrame into the model.
+
+  Note: `y`'s index must match `x`'s index.
+
+  Args:
+    x: pandas `DataFrame` object.
+    y: pandas `Series` object or `DataFrame`. `None` if absent.
+    batch_size: int, size of batches to return.
+    num_epochs: int, number of epochs to iterate over data. If not `None`, read
+      attempts that would exceed this value will raise `OutOfRangeError`.
+    shuffle: bool, whether to read the records in random order.
+    queue_capacity: int, size of the read queue. If `None`, it will be set
+      roughly to the size of `x`.
+    num_threads: Integer, number of threads used for reading and enqueueing. In
+      order to have predictable and repeatable order of reading and enqueueing,
+      such as in prediction and evaluation mode, `num_threads` should be 1.
+    target_column: str, name to give the target column `y`. This parameter is
+      not used when `y` is a `DataFrame`.
+
+  Returns:
+    Function that has the signature ()->(dict of `features`, `target`).
+
+  Raises:
+    ValueError: if `x` already contains a column with the same name as `y`, or
+      if the indexes of `x` and `y` don't match.
+    ValueError: if `shuffle` is not provided or is not a bool.
+  """
+  if not HAS_PANDAS:
+    raise TypeError(
+        'pandas_input_fn should not be called without pandas installed')
+
+  if not isinstance(shuffle, bool):
+    raise ValueError('shuffle must be provided and explicitly set as boolean '
+                     '(it is recommended to set it as True for training); '
+                     'got {}'.format(shuffle))
+
+  if not isinstance(target_column, six.string_types):
+    raise TypeError('target_column must be a string type')
+
+  x = x.copy()
+  if y is not None:
+    if target_column in x:
+      raise ValueError(
+          'Cannot use name %s for target column: DataFrame already has a '
+          'column with that name: %s' % (target_column, x.columns))
+    if not np.array_equal(x.index, y.index):
+      raise ValueError('Index for x and y are mismatched.\nIndex for x: %s\n'
+                       'Index for y: %s\n' % (x.index, y.index))
+    if isinstance(y, pd.DataFrame):
+      y_columns = [
+          (column, _get_unique_target_key(x, column)) for column in list(y)
+      ]
+      target_column = [v for _, v in y_columns]
+      x[target_column] = y
+    else:
+      x[target_column] = y
+
+  # TODO(mdan): These are memory copies. We probably don't need 4x slack space.
+  # The sizes below are consistent with what I've seen elsewhere.
+  if queue_capacity is None:
+    if shuffle:
+      queue_capacity = 4 * len(x)
+    else:
+      queue_capacity = len(x)
+  min_after_dequeue = max(queue_capacity / 4, 1)
+
+  def input_fn():
+    """Pandas input function."""
+    queue = feeding_functions._enqueue_data(  # pylint: disable=protected-access
+        x,
+        queue_capacity,
+        shuffle=shuffle,
+        min_after_dequeue=min_after_dequeue,
+        num_threads=num_threads,
+        enqueue_size=batch_size,
+        num_epochs=num_epochs)
+    if num_epochs is None:
+      features = queue.dequeue_many(batch_size)
+    else:
+      features = queue.dequeue_up_to(batch_size)
+    assert len(features) == len(x.columns) + 1, ('Features should have one '
+                                                 'extra element for the index.')
+    features = features[1:]
+    features = dict(zip(list(x.columns), features))
+    if y is not None:
+      if isinstance(target_column, list):
+        keys = [k for k, _ in y_columns]
+        values = [features.pop(column) for column in target_column]
+        target = {k: v for k, v in zip(keys, values)}
+      else:
+        target = features.pop(target_column)
+      return features, target
+    return features
+
+  return input_fn
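+
+# Illustrative usage sketch (comments only; `df` and `labels` are assumed
+# example inputs, and TF1-style graph execution with queue runners applies):
+#   import pandas as pd
+#   df = pd.DataFrame({'a': [1., 2., 3.], 'b': [4., 5., 6.]})
+#   labels = pd.Series([0, 1, 0])
+#   train_input_fn = pandas_input_fn(
+#       x=df, y=labels, batch_size=2, shuffle=True, num_epochs=None)
+#   features, target = train_input_fn()  # dict of Tensors, Tensor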
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/feeding_functions.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/feeding_functions.py
new file mode 100644
index 00000000..54e346ae
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/feeding_functions.py
@@ -0,0 +1,504 @@
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Helper functions for enqueuing data from arrays and pandas `DataFrame`s."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import random
+import types as tp
+import numpy as np
+import six
+import tensorflow as tf
+from tensorflow.python.framework import ops
+from tensorflow_estimator.python.estimator.inputs.queues import feeding_queue_runner as fqr
+
+try:
+  # pylint: disable=g-import-not-at-top
+  import pandas as pd
+  HAS_PANDAS = True
+except IOError:
+  # Pandas writes a temporary file during import. If it fails, don't use pandas.
+  HAS_PANDAS = False
+except ImportError:
+  HAS_PANDAS = False
+
+
+def _fill_array(arr, seq, fillvalue=0):
+  """Recursively fills padded arr with elements from seq.
+
+  If `seq` is shorter than the padded length of `arr`, `fillvalue` is used.
+  Args:
+    arr: Padded tensor of shape [batch_size, ..., max_padded_dim_len].
+    seq: Non-padded list of data samples of shape
+      [batch_size, ..., padded_dim(None)]
+    fillvalue: Default fillvalue to use.
+  """
+  if arr.ndim == 1:
+    try:
+      len_ = len(seq)
+    except TypeError:
+      len_ = 0
+    arr[:len_] = seq
+    arr[len_:] = fillvalue
+  else:
+    for subarr, subseq in six.moves.zip_longest(arr, seq, fillvalue=()):
+      _fill_array(subarr, subseq, fillvalue)
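+
+# Illustrative sketch (comments only): padding two ragged rows into a
+# preallocated array.
+#   arr = np.zeros((2, 3))
+#   _fill_array(arr, [[1, 2], [3]])
+#   # arr is now [[1., 2., 0.], [3., 0., 0.]]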
+
+
+def _pad_if_needed(batch_key_item, fillvalue=0):
+  """ Returns padded batch.
+
+  Args:
+    batch_key_item: List of data samples of any type with shape
+      [batch_size, ..., padded_dim(None)].
+    fillvalue: Default fillvalue to use.
+
+  Returns:
+    Batch padded with `fillvalue` to a tensor of the same type and shape
+      [batch_size, ..., max_padded_dim_len].
+
+  Raises:
+    ValueError: if data samples have different shapes (except the last,
+      padded dim).
+  """
+  shapes = [
+      seq.shape[:-1] if len(seq.shape) > 0 else -1 for seq in batch_key_item
+  ]
+  if not all(shapes[0] == x for x in shapes):
+    raise ValueError("Array shapes must match.")
+
+  last_length = [
+      seq.shape[-1] if len(seq.shape) > 0 else 0 for seq in batch_key_item
+  ]
+  if all([x == last_length[0] for x in last_length]):
+    return batch_key_item
+
+  batch_size = len(batch_key_item)
+  max_sequence_length = max(last_length)
+  result_batch = np.zeros(
+      shape=[batch_size] + list(shapes[0]) + [max_sequence_length],
+      dtype=batch_key_item[0].dtype)
+  _fill_array(result_batch, batch_key_item, fillvalue)
+  return result_batch
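+
+# Illustrative sketch (comments only): samples that differ only in their last
+# dimension are right-padded to the longest sample.
+#   _pad_if_needed([np.array([1, 2, 3]), np.array([4])])
+#   # -> array([[1, 2, 3], [4, 0, 0]])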
+
+
+def _get_integer_indices_for_next_batch(batch_indices_start, batch_size,
+                                        epoch_end, array_length, current_epoch,
+                                        total_epochs):
+  """Returns the integer indices for next batch.
+
+  If total epochs is not None and current epoch is the final epoch, the end
+  index of the next batch should not exceed the `epoch_end` (i.e., the final
+  batch might not have size `batch_size` to avoid overshooting the last epoch).
+
+  Args:
+    batch_indices_start: Integer, the index to start next batch.
+    batch_size: Integer, size of batches to return.
+    epoch_end: Integer, the end index of the epoch. The epoch could start from a
+      random position, so `epoch_end` provides the end index for that.
+    array_length: Integer, the length of the array.
+    current_epoch: Integer, the number of epochs that have been emitted so far.
+    total_epochs: Integer or `None`, the total number of epochs to emit. If
+      `None` will run forever.
+
+  Returns:
+    A tuple of a list with integer indices for next batch and `current_epoch`
+    value after the next batch.
+
+  Raises:
+    OutOfRangeError if `current_epoch` is not less than `total_epochs`.
+
+  """
+  if total_epochs is not None and current_epoch >= total_epochs:
+    raise tf.errors.OutOfRangeError(
+        None, None, "Already emitted %s epochs." % current_epoch)
+
+  batch_indices_end = batch_indices_start + batch_size
+  batch_indices = [
+      j % array_length for j in range(batch_indices_start, batch_indices_end)
+  ]
+  epoch_end_indices = [i for i, x in enumerate(batch_indices) if x == epoch_end]
+  current_epoch += len(epoch_end_indices)
+
+  if total_epochs is None or current_epoch < total_epochs:
+    return (batch_indices, current_epoch)
+
+  # We might have emitted more data than the requested number of epochs; trim.
+  final_epoch_end_inclusive = epoch_end_indices[-(current_epoch - total_epochs +
+                                                  1)]
+  batch_indices = batch_indices[:final_epoch_end_inclusive + 1]
+
+  return (batch_indices, total_epochs)
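+
+# Illustrative worked example (comments only), for an array of length 5 read
+# from a random start at index 3 (so epoch_end == 2):
+#   _get_integer_indices_for_next_batch(
+#       batch_indices_start=3, batch_size=4, epoch_end=2, array_length=5,
+#       current_epoch=0, total_epochs=1)
+#   # -> ([3, 4, 0, 1], 0): indices wrap around modulo the array length, and
+#   # a later batch that crosses index 2 is trimmed there to end the epoch.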
+
+
+class _ArrayFeedFn(object):
+  """Creates feed dictionaries from numpy arrays."""
+
+  def __init__(self,
+               placeholders,
+               array,
+               batch_size,
+               random_start=False,
+               seed=None,
+               num_epochs=None):
+    if len(placeholders) != 2:
+      raise ValueError("_array_feed_fn expects 2 placeholders; got {}.".format(
+          len(placeholders)))
+    self._placeholders = placeholders
+    self._array = array
+    self._max = len(array)
+    self._batch_size = batch_size
+    self._num_epochs = num_epochs
+    self._epoch = 0
+    random.seed(seed)
+    self._trav = random.randrange(self._max) if random_start else 0
+    self._epoch_end = (self._trav - 1) % self._max
+
+  def __call__(self):
+    integer_indexes, self._epoch = _get_integer_indices_for_next_batch(
+        batch_indices_start=self._trav,
+        batch_size=self._batch_size,
+        epoch_end=self._epoch_end,
+        array_length=self._max,
+        current_epoch=self._epoch,
+        total_epochs=self._num_epochs)
+
+    self._trav = (integer_indexes[-1] + 1) % self._max
+    return {
+        self._placeholders[0]: integer_indexes,
+        self._placeholders[1]: self._array[integer_indexes]
+    }
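+
+# Illustrative sketch (comments only; `index_ph` and `value_ph` are assumed
+# tf.compat.v1 placeholders):
+#   feed_fn = _ArrayFeedFn([index_ph, value_ph], np.arange(10), batch_size=4)
+#   feed_fn()  # -> {index_ph: [0, 1, 2, 3], value_ph: array([0, 1, 2, 3])}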
+
+
+class _OrderedDictNumpyFeedFn(object):
+  """Creates feed dictionaries from `OrderedDict`s of numpy arrays."""
+
+  def __init__(self,
+               placeholders,
+               ordered_dict_of_arrays,
+               batch_size,
+               random_start=False,
+               seed=None,
+               num_epochs=None):
+    if len(placeholders) != len(ordered_dict_of_arrays) + 1:
+      raise ValueError("Expected {} placeholders; got {}.".format(
+          len(ordered_dict_of_arrays) + 1, len(placeholders)))
+    self._index_placeholder = placeholders[0]
+    self._col_placeholders = placeholders[1:]
+    self._ordered_dict_of_arrays = ordered_dict_of_arrays
+    self._max = len(next(iter(ordered_dict_of_arrays.values())))
+    for _, v in ordered_dict_of_arrays.items():
+      if len(v) != self._max:
+        raise ValueError("Array lengths must match.")
+    self._batch_size = batch_size
+    self._num_epochs = num_epochs
+    self._epoch = 0
+    random.seed(seed)
+    self._trav = random.randrange(self._max) if random_start else 0
+    self._epoch_end = (self._trav - 1) % self._max
+
+  def __call__(self):
+    integer_indexes, self._epoch = _get_integer_indices_for_next_batch(
+        batch_indices_start=self._trav,
+        batch_size=self._batch_size,
+        epoch_end=self._epoch_end,
+        array_length=self._max,
+        current_epoch=self._epoch,
+        total_epochs=self._num_epochs)
+
+    self._trav = (integer_indexes[-1] + 1) % self._max
+    feed_dict = {self._index_placeholder: integer_indexes}
+    cols = [
+        column[integer_indexes]
+        for column in self._ordered_dict_of_arrays.values()
+    ]
+    feed_dict.update(dict(zip(self._col_placeholders, cols)))
+    return feed_dict
+
+
+class _PandasFeedFn(object):
+  """Creates feed dictionaries from pandas `DataFrames`."""
+
+  def __init__(self,
+               placeholders,
+               dataframe,
+               batch_size,
+               random_start=False,
+               seed=None,
+               num_epochs=None):
+    if len(placeholders) != len(dataframe.columns) + 1:
+      raise ValueError("Expected {} placeholders; got {}.".format(
+          len(dataframe.columns) + 1, len(placeholders)))
+    self._index_placeholder = placeholders[0]
+    self._col_placeholders = placeholders[1:]
+    self._dataframe = dataframe
+    self._max = len(dataframe)
+    self._batch_size = batch_size
+    self._num_epochs = num_epochs
+    self._epoch = 0
+    random.seed(seed)
+    self._trav = random.randrange(self._max) if random_start else 0
+    self._epoch_end = (self._trav - 1) % self._max
+
+  def __call__(self):
+    integer_indexes, self._epoch = _get_integer_indices_for_next_batch(
+        batch_indices_start=self._trav,
+        batch_size=self._batch_size,
+        epoch_end=self._epoch_end,
+        array_length=self._max,
+        current_epoch=self._epoch,
+        total_epochs=self._num_epochs)
+
+    self._trav = (integer_indexes[-1] + 1) % self._max
+    result = self._dataframe.iloc[integer_indexes]
+    cols = [result[col].values for col in result.columns]
+    feed_dict = dict(zip(self._col_placeholders, cols))
+    feed_dict[self._index_placeholder] = result.index.values
+    return feed_dict
+
+
+class _GeneratorFeedFn(object):
+  """Creates feed dictionaries from `Generator` of `dicts` of numpy arrays."""
+
+  def __init__(self,
+               placeholders,
+               generator,
+               batch_size,
+               random_start=False,
+               seed=None,
+               num_epochs=None,
+               pad_value=None):
+    first_sample = next(generator())
+    if len(placeholders) != len(first_sample):
+      raise ValueError("Expected {} placeholders; got {}.".format(
+          len(first_sample), len(placeholders)))
+    self._keys = sorted(list(first_sample.keys()))
+    self._col_placeholders = placeholders
+    self._generator_function = generator
+    self._iterator = generator()
+    self._batch_size = batch_size
+    self._num_epochs = num_epochs
+    self._epoch = 0
+    self._pad_value = pad_value
+    random.seed(seed)
+
+  def __call__(self):
+    if self._num_epochs and self._epoch >= self._num_epochs:
+      raise tf.errors.OutOfRangeError(
+          None, None, "Already emitted %s epochs." % self._epoch)
+    list_dict = {}
+    list_dict_size = 0
+    while list_dict_size < self._batch_size:
+      try:
+        data_row = next(self._iterator)
+      except StopIteration:
+        self._epoch += 1
+        self._iterator = self._generator_function()
+        data_row = next(self._iterator)
+      for index, key in enumerate(self._keys):
+        if key not in data_row.keys():
+          raise KeyError("key mismatch between dicts emitted by GenFun. "
+                         "Expected {} keys; got {}".format(
+                             self._keys, data_row.keys()))
+        list_dict.setdefault(self._col_placeholders[index],
+                             list()).append(data_row[key])
+      # Count rows, not individual columns, toward the batch size.
+      list_dict_size += 1
+
+    if self._pad_value is not None:
+      feed_dict = {
+          key: np.asarray(_pad_if_needed(item, self._pad_value))
+          for key, item in list(list_dict.items())
+      }
+    else:
+      feed_dict = {
+          key: np.asarray(item) for key, item in list(list_dict.items())
+      }
+    return feed_dict
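+
+# Illustrative sketch (comments only; `x_ph` and `y_ph` are assumed
+# placeholders, ordered to match the sorted dict keys 'x', 'y'):
+#   def gen():
+#     for i in range(4):
+#       yield {'x': np.array([i]), 'y': np.array([2 * i])}
+#   feed_fn = _GeneratorFeedFn([x_ph, y_ph], gen, batch_size=2)
+#   feed_fn()  # -> {x_ph: array([[0], [1]]), y_ph: array([[0], [2]])}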
+
+
+def _enqueue_data(data,
+                  capacity,
+                  shuffle=False,
+                  min_after_dequeue=None,
+                  num_threads=1,
+                  seed=None,
+                  name="enqueue_input",
+                  enqueue_size=1,
+                  num_epochs=None,
+                  pad_value=None):
+  """Creates a queue filled from a numpy array or pandas `DataFrame`.
+
+  Returns a queue filled with the rows of the given (`OrderedDict` of) array
+  or `DataFrame`. In the case of a pandas `DataFrame`, the first enqueued
+  `Tensor` corresponds to the index of the `DataFrame`. For (`OrderedDict` of)
+  numpy arrays, the first enqueued `Tensor` contains the row number.
+
+  Args:
+    data: a numpy `ndarray`, `OrderedDict` of numpy arrays, or a generator
+      yielding `dict`s of numpy arrays or pandas `DataFrame` that will be read
+      into the queue.
+    capacity: the capacity of the queue.
+    shuffle: whether or not to shuffle the rows of the array.
+    min_after_dequeue: minimum number of elements that can remain in the queue
+      after a dequeue operation. Only used when `shuffle` is true. If not set,
+      defaults to `capacity` / 4.
+    num_threads: number of threads used for reading and enqueueing.
+    seed: used to seed shuffling and reader starting points.
+    name: a scope name identifying the data.
+    enqueue_size: the number of rows to enqueue per step.
+    num_epochs: limit enqueuing to a specified number of epochs, if provided.
+    pad_value: default value for dynamic padding of data samples, if provided.
+
+  Returns:
+    A queue filled with the rows of the given (`OrderedDict` of) array or
+      `DataFrame`.
+
+  Raises:
+    TypeError: `data` is not a Pandas `DataFrame`, an `OrderedDict` of numpy
+      arrays, a numpy `ndarray`, or a generator producing these.
+    NotImplementedError: padding and shuffling data at the same time.
+    NotImplementedError: padding usage with non generator data type.
+  """
+  with ops.name_scope(name):
+    if isinstance(data, np.ndarray):
+      types = [tf.dtypes.int64, tf.dtypes.as_dtype(data.dtype)]
+      queue_shapes = [(), data.shape[1:]]
+      get_feed_fn = _ArrayFeedFn
+    elif isinstance(data, collections.OrderedDict):
+      types = [tf.dtypes.int64
+              ] + [tf.dtypes.as_dtype(col.dtype) for col in data.values()]
+      queue_shapes = [()] + [col.shape[1:] for col in data.values()]
+      get_feed_fn = _OrderedDictNumpyFeedFn
+    elif isinstance(data, tp.FunctionType):
+      x_first_el = six.next(data())
+      x_first_keys = sorted(x_first_el.keys())
+      x_first_values = [x_first_el[key] for key in x_first_keys]
+      types = [tf.dtypes.as_dtype(col.dtype) for col in x_first_values]
+      queue_shapes = [col.shape for col in x_first_values]
+      get_feed_fn = _GeneratorFeedFn
+    elif HAS_PANDAS and isinstance(data, pd.DataFrame):
+      types = [
+          tf.dtypes.as_dtype(dt)
+          for dt in [data.index.dtype] + list(data.dtypes)
+      ]
+      queue_shapes = [() for _ in types]
+      get_feed_fn = _PandasFeedFn
+    else:
+      raise TypeError(
+          "data must be a numpy array, an OrderedDict of numpy arrays, a "
+          "generator of dicts of numpy arrays, or a pandas DataFrame if "
+          "pandas is installed; got {}".format(type(data).__name__))
+
+    pad_data = pad_value is not None
+    if pad_data and get_feed_fn is not _GeneratorFeedFn:
+      raise NotImplementedError(
+          "padding is only available with generator usage")
+    if shuffle and pad_data:
+      raise NotImplementedError(
+          "padding and shuffling data at the same time is not implemented")
+
+    # TODO(jamieas): TensorBoard warnings for all warnings below once available.
+
+    if num_threads > 1 and num_epochs is not None:
+      tf.compat.v1.logging.warn(
+          "enqueue_data was called with num_epochs and num_threads > 1. "
+          "num_epochs is applied per thread, so this will produce more "
+          "epochs than you probably intend. "
+          "If you want to limit epochs, use one thread.")
+
+    if shuffle and num_threads > 1 and num_epochs is not None:
+      tf.compat.v1.logging.warn(
+          "enqueue_data was called with shuffle=True, num_threads > 1, and "
+          "num_epochs. This will create multiple threads, all reading the "
+          "array/dataframe in order adding to the same shuffling queue; the "
+          "results will likely not be sufficiently shuffled.")
+
+    if not shuffle and num_threads > 1:
+      tf.compat.v1.logging.warn(
+          "enqueue_data was called with shuffle=False and num_threads > 1. "
+          "This will create multiple threads, all reading the "
+          "array/dataframe in order. If you want examples read in order, use"
+          " one thread; if you want multiple threads, enable shuffling.")
+
+    if shuffle:
+      min_after_dequeue = int(
+          capacity / 4 if min_after_dequeue is None else min_after_dequeue)
+      queue = tf.queue.RandomShuffleQueue(
+          capacity,
+          min_after_dequeue,
+          dtypes=types,
+          shapes=queue_shapes,
+          seed=seed)
+    elif pad_data:
+      min_after_dequeue = 0  # just for the summary text
+      queue_shapes = list(
+          map(lambda x: tuple(list(x[:-1]) + [None])
+              if len(x) > 0 else x, queue_shapes))
+      queue = tf.queue.PaddingFIFOQueue(
+          capacity, dtypes=types, shapes=queue_shapes)
+    else:
+      min_after_dequeue = 0  # just for the summary text
+      queue = tf.queue.FIFOQueue(capacity, dtypes=types, shapes=queue_shapes)
+
+    enqueue_ops = []
+    feed_fns = []
+
+    for i in range(num_threads):
+      # Note the placeholders have no shapes, so they will accept any
+      # enqueue_size.  enqueue_many below will break them up.
+      placeholders = [tf.compat.v1.placeholder(t) for t in types]
+
+      enqueue_ops.append(queue.enqueue_many(placeholders))
+      seed_i = None if seed is None else (i + 1) * seed
+
+      if not pad_data:
+        feed_fns.append(
+            get_feed_fn(
+                placeholders,
+                data,
+                enqueue_size,
+                random_start=shuffle,
+                seed=seed_i,
+                num_epochs=num_epochs))
+      else:
+        feed_fns.append(
+            get_feed_fn(
+                placeholders,
+                data,
+                enqueue_size,
+                random_start=shuffle,
+                seed=seed_i,
+                num_epochs=num_epochs,
+                pad_value=pad_value))
+
+    runner = fqr._FeedingQueueRunner(  # pylint: disable=protected-access
+        queue=queue,
+        enqueue_ops=enqueue_ops,
+        feed_fns=feed_fns)
+    tf.compat.v1.train.queue_runner.add_queue_runner(runner)
+
+    full = (
+        tf.cast(
+            tf.math.maximum(0,
+                            queue.size() - min_after_dequeue),
+            tf.dtypes.float32) * (1. / (capacity - min_after_dequeue)))
+    # Note that name contains a '/' at the end so we intentionally do not place
+    # a '/' after %s below.
+    summary_name = (
+        "queue/%sfraction_over_%d_of_%d_full" %
+        (queue.name, min_after_dequeue, capacity - min_after_dequeue))
+    tf.compat.v1.summary.scalar(summary_name, full)
+    return queue
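+
+# Illustrative usage sketch (comments only; TF1 graph mode with queue runners
+# started via tf.compat.v1.train.start_queue_runners is assumed):
+#   data = np.arange(8).reshape(4, 2)
+#   queue = _enqueue_data(data, capacity=16, shuffle=False)
+#   index, rows = queue.dequeue_many(2)  # first Tensor is the row number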
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/feeding_queue_runner.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/feeding_queue_runner.py
new file mode 100644
index 00000000..fbab7a2e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/inputs/queues/feeding_queue_runner.py
@@ -0,0 +1,184 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""A `QueueRunner` that takes a feed function as an argument."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import threading
+import tensorflow as tf
+
+
+class _FeedingQueueRunner(tf.compat.v1.train.queue_runner.QueueRunner):
+  """A queue runner that allows the feeding of values such as numpy arrays."""
+
+  def __init__(self,
+               queue=None,
+               enqueue_ops=None,
+               close_op=None,
+               cancel_op=None,
+               feed_fns=None,
+               queue_closed_exception_types=None):
+    """Initialize the queue runner.
+
+    For further documentation, see `queue_runner.py`. Note that
+    `FeedingQueueRunner` supports neither construction from a protocol buffer
+    nor serialization to one.
+
+    Args:
+      queue: A `Queue`.
+      enqueue_ops: List of enqueue ops to run in threads later.
+      close_op: Op to close the queue. Pending enqueue ops are preserved.
+      cancel_op: Op to close the queue and cancel pending enqueue ops.
+      feed_fns: a list of functions that return a dictionary mapping fed
+        `Tensor`s to values. Must be the same length as `enqueue_ops`.
+      queue_closed_exception_types: Optional tuple of Exception types that
+        indicate that the queue has been closed when raised during an enqueue
+        operation.  Defaults to `(tf.errors.OutOfRangeError,
+        tf.errors.CancelledError)`.
+
+    Raises:
+      ValueError: `feed_fns` is not `None` and has different length than
+        `enqueue_ops`.
+    """
+    if queue_closed_exception_types is None:
+      queue_closed_exception_types = (tf.errors.OutOfRangeError,
+                                      tf.errors.CancelledError)
+    super(_FeedingQueueRunner, self).__init__(
+        queue,
+        enqueue_ops,
+        close_op,
+        cancel_op,
+        queue_closed_exception_types=queue_closed_exception_types)
+    if feed_fns is None:
+      self._feed_fns = [None for _ in enqueue_ops]
+    else:
+      if len(feed_fns) != len(enqueue_ops):
+        raise ValueError(
+            "If feed_fns is not None, it must have the same length as "
+            "enqueue_ops.")
+      self._feed_fns = feed_fns
+
+  # pylint: disable=broad-except
+  def _run(self, sess, enqueue_op, feed_fn, coord=None):
+    """Execute the enqueue op in a loop, close the queue in case of error.
+
+    Args:
+      sess: A `Session`.
+      enqueue_op: The `Operation` to run.
+      feed_fn: the feed function to pass to `sess.run`.
+      coord: Optional `Coordinator` object for reporting errors and checking for
+        stop conditions.
+    """
+    # TODO(jamieas): Reduce code duplication with `QueueRunner`.
+    if coord:
+      coord.register_thread(threading.current_thread())
+    decremented = False
+    try:
+      while True:
+        if coord and coord.should_stop():
+          break
+        try:
+          feed_dict = None if feed_fn is None else feed_fn()
+          sess.run(enqueue_op, feed_dict=feed_dict)
+        except (tf.errors.OutOfRangeError, tf.errors.CancelledError):
+          # This exception indicates that a queue was closed.
+          with self._lock:
+            self._runs_per_session[sess] -= 1
+            decremented = True
+            if self._runs_per_session[sess] == 0:
+              try:
+                sess.run(self._close_op)
+              except Exception as e:
+                # Intentionally ignore errors from close_op.
+                tf.compat.v1.logging.vlog(1, "Ignored exception: %s", str(e))
+            return
+    except Exception as e:
+      # This catches all other exceptions.
+      if coord:
+        coord.request_stop(e)
+      else:
+        tf.compat.v1.logging.error("Exception in QueueRunner: %s", str(e))
+        with self._lock:
+          self._exceptions_raised.append(e)
+        raise
+    finally:
+      # Make sure we account for all terminations: normal or errors.
+      if not decremented:
+        with self._lock:
+          self._runs_per_session[sess] -= 1
+
+  def create_threads(self, sess, coord=None, daemon=False, start=False):
+    """Create threads to run the enqueue ops for the given session.
+
+    This method requires a session in which the graph was launched.  It creates
+    a list of threads, optionally starting them.  There is one thread for each
+    op passed in `enqueue_ops`.
+
+    The `coord` argument is an optional coordinator, that the threads will use
+    to terminate together and report exceptions.  If a coordinator is given,
+    this method starts an additional thread to close the queue when the
+    coordinator requests a stop.
+
+    If previously created threads for the given session are still running, no
+    new threads will be created.
+
+    Args:
+      sess: A `Session`.
+      coord: Optional `Coordinator` object for reporting errors and checking
+        stop conditions.
+      daemon: Boolean.  If `True` make the threads daemon threads.
+      start: Boolean.  If `True` starts the threads.  If `False` the caller must
+        call the `start()` method of the returned threads.
+
+    Returns:
+      A list of threads.
+    """
+    with self._lock:
+      try:
+        if self._runs_per_session[sess] > 0:
+          # Already started: no new threads to return.
+          return []
+      except KeyError:
+        # We haven't seen this session yet.
+        pass
+      self._runs_per_session[sess] = len(self._enqueue_ops)
+      self._exceptions_raised = []
+
+    ret_threads = [
+        threading.Thread(target=self._run, args=(sess, op, feed_fn, coord))
+        for op, feed_fn in zip(self._enqueue_ops, self._feed_fns)
+    ]
+    if coord:
+      ret_threads.append(
+          threading.Thread(
+              target=self._close_on_stop, args=(sess, self._cancel_op, coord)))
+    for t in ret_threads:
+      if daemon:
+        t.daemon = True
+      if start:
+        t.start()
+    return ret_threads
+
+  def _init_from_proto(self, queue_runner_def):
+    raise NotImplementedError(
+        "{} does not support initialization from proto.".format(
+            type(self).__name__))
+
+  def to_proto(self):
+    raise NotImplementedError(
+        "{} does not support serialization to proto.".format(
+            type(self).__name__))
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/keras.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/keras.py
new file mode 100644
index 00000000..b7b265ee
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/keras.py
@@ -0,0 +1,748 @@
+# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+# pylint: disable=protected-access
+"""Home of estimator related functions."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+import re
+import tensorflow as tf
+from tensorflow.python.framework import ops
+from tensorflow.python.keras import backend as K
+from tensorflow.python.keras import models
+from tensorflow.python.training.tracking import graph_view
+from tensorflow.python.training.tracking import util as trackable_util
+from tensorflow_estimator.python.estimator import estimator as estimator_lib
+from tensorflow_estimator.python.estimator import model_fn as model_fn_lib
+from tensorflow_estimator.python.estimator.export import export_lib
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+_DEFAULT_SERVING_KEY = tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY
+
+try:
+  from tensorflow.python.keras.engine import training_utils_v1 as training_utils  # pylint:disable=g-import-not-at-top
+except ImportError:
+  from tensorflow.python.keras.engine import training_utils  # pylint:disable=g-import-not-at-top
+
+
+class FormattedKeyError(KeyError):
+  """KeyError with formatted error message.
+
+  Python's `KeyError` has special casing around formatting
+  (see https://bugs.python.org/issue2651). Use this class when the error
+  message has newlines and other special format characters.
+
+  Needed by https://github.com/tensorflow/tensorflow/issues/36857.
+  """
+
+  def __init__(self, message):
+    self.message = message
+
+  def __str__(self):
+    return self.message
+
+
+def _cast_tensor_to_floatx(x):
+  """Cast tensor to keras's floatx dtype if it is not already the same dtype."""
+  if x.dtype == K.floatx():
+    return x
+  else:
+    return tf.cast(x, K.floatx())
+
+
+def _convert_tensor(x):
+  """Create or cast tensor if needed."""
+  if not tf.is_tensor(x):
+    # x is a numpy array
+    x = tf.compat.v1.convert_to_tensor_or_sparse_tensor(x)
+  return x
+
+
+def _any_weight_initialized(keras_model):
+  """Check if any weights has been initialized in the Keras model.
+
+  Args:
+    keras_model: An instance of compiled keras model.
+
+  Returns:
+    boolean, True if at least one weight has been initialized, else False.
+    Currently Keras initializes all weights in get_session().
+  """
+  if keras_model is None:
+    return False
+  if ops.executing_eagerly_outside_functions():
+    return True
+  for layer in keras_model.layers:
+    for weight in layer.weights:
+      if hasattr(weight, '_keras_initialized'):
+        return True
+  return False
+
+
+def _convert_estimator_io_to_keras(keras_model, features, labels):
+  """Converts estimator features and labels to keras input and target tensors.
+
+  Args:
+    keras_model: a compiled `tf.keras.Model` instance, used to determine the
+      order of the returned lists.
+    features: Dict of tensors or `None`.
+    labels: Dict of tensors, a single tensor, or `None`.
+
+  Returns:
+    Tuple of (
+      list of input tensors or `None`,
+      list of target tensors or `None`,
+      list of sample weight tensors or `None`)
+    The order of tensors is determined by the order set in the keras model.
+  """
+
+  def _to_ordered_tensor_list(obj, key_order, obj_name, order_name):
+    """Convert obj to an ordered list of tensors.
+
+    Args:
+      obj: List, dict, or single tensor. May be `None`.
+      key_order: List of strings with the order to return (used if obj is a
+        dict).
+      obj_name: String name of object (e.g. "features" or "labels")
+      order_name: String name of the key order (e.g. "inputs" or "outputs")
+
+    Returns:
+      List of tensors, or `None`
+
+    Raises:
+      KeyError: If obj has invalid keys.
+    """
+    if obj is None:
+      return None
+    elif isinstance(obj, (list, tuple)):
+      return [_convert_tensor(x) for x in obj]
+    elif isinstance(obj, dict):
+      # Ensure that keys in key_order are contained in obj keys.
+      # One can provide more data keys described in obj, as long as the keys
+      # requested by model are provided.
+      different_keys = set(key_order) - set(obj.keys())
+
+      if different_keys:
+        raise FormattedKeyError(
+            'The dictionary passed into {obj_name} does not cover requested '
+            '{order_name} keys defined in the keras model.'
+            '\n\tExpected keys: {order_keys}'
+            '\n\t{obj_name} keys: {obj_keys}'
+            '\n\tMissing keys: {different_keys}'.format(
+                order_name=order_name,
+                order_keys=set(key_order),
+                obj_name=obj_name,
+                obj_keys=set(obj.keys()),
+                different_keys=different_keys))
+
+      return [_convert_tensor(obj[key]) for key in key_order]
+    else:  # Assume obj is a tensor.
+      return [_convert_tensor(obj)]
+
+  features, sample_weight_tensors = _extract_sample_weight_tensors(features)
+  input_names = None
+  output_names = None
+  if isinstance(features, dict):
+    input_names = (
+        keras_model.input_names if keras_model._is_graph_network else
+        ['input_%d' % i for i in range(1,
+                                       len(features) + 1)])
+  if isinstance(labels, dict):
+    output_names = (
+        keras_model.output_names if keras_model._is_graph_network else
+        ['output_%d' % i for i in range(1,
+                                        len(labels) + 1)])
+
+  if isinstance(keras_model.inputs, dict):
+    # Keep input tensors as a dict if keras_model is built with dict input.
+    input_tensors = {
+        k: _convert_tensor(features[k])
+        for (k, v) in keras_model.inputs.items()
+    }
+  elif keras_model.inputs is None and isinstance(features, dict):
+    # Keep input tensors as a dict if keras_model input structure is unknown.
+    input_tensors = {k: _convert_tensor(v) for (k, v) in features.items()}
+  else:
+    # Convert input tensors into an ordered list.
+    input_tensors = _to_ordered_tensor_list(features, input_names, 'features',
+                                            'inputs')
+  target_tensors = _to_ordered_tensor_list(labels, output_names, 'labels',
+                                           'outputs')
+
+  return input_tensors, target_tensors, sample_weight_tensors
+
+
+def _extract_sample_weight_tensors(features):
+  if isinstance(features, dict) and set(
+      features.keys()) == {'features', 'sample_weights'}:
+    feature_tensor = features['features']
+    sample_weight_tensors = features['sample_weights']
+  else:
+    feature_tensor = features
+    sample_weight_tensors = None
+  return feature_tensor, sample_weight_tensors
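+
+# Illustrative sketch (comments only): sample weights ride along inside
+# `features` under a fixed two-key dict convention.
+#   _extract_sample_weight_tensors({'features': f, 'sample_weights': w})
+#   # -> (f, w)
+#   _extract_sample_weight_tensors(f)  # -> (f, None)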
+
+
+def _clone_and_build_model(mode,
+                           keras_model,
+                           custom_objects,
+                           features=None,
+                           labels=None,
+                           optimizer_config=None):
+  """Clone and build the given keras_model.
+
+  Args:
+    mode: training mode.
+    keras_model: an instance of compiled keras model.
+    custom_objects: Dictionary for custom objects.
+    features: Dict of tensors.
+    labels: Dict of tensors, or single tensor instance.
+    optimizer_config: Optimizer config dictionary, returned by
+      `optimizer.get_config()`. This is used when cloning a model with an
+      optimizer. Since `_clone_and_build_model` is called in a different graph
+      and session from the model, `optimizer.get_config()` may raise an error
+      during the attempt to serialize the optimizer hyperparameter values.
+
+  Returns:
+    The newly built model.
+  """
+  # Set to True during training, False for inference or testing.
+  K.set_learning_phase(mode == ModeKeys.TRAIN)
+  input_tensors, target_tensors, sample_weight_tensors = (
+      _convert_estimator_io_to_keras(keras_model, features, labels))
+
+  compile_clone = (mode != ModeKeys.PREDICT)
+
+  global_step = None
+  if compile_clone:
+    # Set iterations to the global step created by tf.train.create_global_step()
+    # which is automatically run in the estimator framework.
+    global_step = tf.compat.v1.train.get_or_create_global_step()
+    K.track_variable(global_step)
+
+  clone = models.clone_and_build_model(
+      keras_model,
+      input_tensors,
+      target_tensors,
+      custom_objects,
+      compile_clone=compile_clone,
+      in_place_reset=(not keras_model._is_graph_network),
+      optimizer_iterations=global_step,
+      optimizer_config=optimizer_config)
+
+  if sample_weight_tensors is not None:
+    sample_weight_tensors = training_utils.standardize_sample_weights(
+        sample_weight_tensors, clone.output_names)
+    # Update calculated loss (model.total_loss) to include sample weights.
+    clone._compile_weights_loss_and_weighted_metrics(sample_weight_tensors)
+  return clone
+
+
+def _convert_keras_metrics_to_estimator(model, metric_names_map=None):
+  """Convert metrics from a Keras model to ops used by the Estimator framework.
+
+  Args:
+    model: A `tf.keras.Model` object.
+    metric_names_map: Optional dictionary mapping Keras model output metric
+      names to custom names.
+
+  Returns:
+    Dictionary mapping metric names to tuples of (value, update) ops. May return
+    `None` if the model does not contain any metrics.
+  """
+  if not getattr(model, '_compile_metrics', None):
+    return None
+
+  # We are not using model.metrics here because we want to exclude the metrics
+  # added using `add_metric` API.
+  compiled_metrics = model._compile_metric_functions
+
+  if metric_names_map:
+    custom_map_keys = set(metric_names_map.keys())
+    expected_keys = {m.name for m in compiled_metrics}
+    unknown = expected_keys.difference(custom_map_keys)
+    if unknown:
+      raise ValueError(
+          'Invalid `metric_names_map`. '
+          'The following keras model metric names:"{}" do not exist in '
+          'the `metric_names_map` dictionary'.format(list(unknown)))
+
+    extra = custom_map_keys.difference(expected_keys)
+    if extra:
+      raise ValueError('Invalid `metric_names_map`. '
+                       'There are unexpected keys in the `metric_names_map` '
+                       'dictionary. Expected keys: {}, Received: {}'.format(
+                           list(expected_keys), list(extra)))
+
+    return {metric_names_map[m.name]: m for m in compiled_metrics}
+  else:
+    return {m.name: m for m in compiled_metrics}
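+
+# Illustrative sketch (comments only; metric names are assumed): for a model
+# with outputs out_1/out_2 compiled with an 'acc' metric,
+#   _convert_keras_metrics_to_estimator(
+#       model, metric_names_map={'out_1_acc': 'acc_1', 'out_2_acc': 'acc_2'})
+#   # -> {'acc_1': <Metric ...>, 'acc_2': <Metric ...>}, keyed by custom names.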
+
+
+def _create_keras_model_fn(keras_model,
+                           custom_objects=None,
+                           save_object_ckpt=False,
+                           metric_names_map=None,
+                           export_outputs=None):
+  """Creates model_fn for keras Estimator.
+
+  Args:
+    keras_model: an instance of compiled keras model.
+    custom_objects: Dictionary for custom objects.
+    save_object_ckpt: Whether to save an object-based checkpoint.
+    metric_names_map: Optional dictionary mapping Keras model output metric
+      names to custom names.
+    export_outputs: Optional dictionary mapping custom names to a subclass of
+      `tf.estimator.export.ExportOutput`.
+
+  Returns:
+    The model_fn for a keras Estimator.
+  """
+  # Get optimizer config in the current context (since model_fn is called in the
+  # estimator graph and session). OptimizerV2 objects serialize variable/tensor
+  # hyperparameters in their configs, resulting in wrong-session errors during
+  # model cloning.
+  try:
+    if isinstance(keras_model.optimizer, (tuple, list)):
+      optimizer_config = [opt.get_config() for opt in keras_model.optimizer]
+    else:
+      optimizer_config = keras_model.optimizer.get_config()
+  except (NotImplementedError, AttributeError):
+    # TFOptimizers and other custom optimizers do not have a config.
+    optimizer_config = None
+
+  def model_fn(features, labels, mode):
+    """model_fn for keras Estimator."""
+    model = _clone_and_build_model(
+        mode=mode,
+        keras_model=keras_model,
+        custom_objects=custom_objects,
+        features=features,
+        labels=labels,
+        optimizer_config=optimizer_config)
+    model_output_names = []
+    # We need to make sure that the output names of the last layer in the model
+    # are the same for each of the cloned models. This is required for mirrored
+    # strategy when we call regroup.
+    if tf.distribute.has_strategy():
+      for name in model.output_names:
+        name = re.compile(r'_\d$').sub('', name)
+        model_output_names.append(name)
+    else:
+      model_output_names = model.output_names
+
+    # Get inputs to EstimatorSpec
+    predictions = dict(zip(model_output_names, model.outputs))
+
+    loss = None
+    train_op = None
+    eval_metric_ops = None
+
+    # Set loss and metric only during train and evaluate.
+    if mode is not ModeKeys.PREDICT:
+      if mode is ModeKeys.TRAIN:
+        model._make_train_function()  # pylint: disable=protected-access
+      else:
+        model._make_test_function()  # pylint: disable=protected-access
+      loss = model.total_loss
+
+      eval_metric_ops = _convert_keras_metrics_to_estimator(
+          model, metric_names_map)
+
+    # Set train_op only during train.
+    if mode is ModeKeys.TRAIN:
+      train_op = model.train_function.updates_op
+
+    if (not model._is_graph_network and
+        hasattr(keras_model, '_original_attributes_cache') and
+        keras_model._original_attributes_cache is not None):
+      # To avoid `model_fn` being destructive for the initial model argument.
+      models.in_place_subclassed_model_state_restoration(keras_model)
+
+    scaffold = None
+    if save_object_ckpt:
+      model._track_trackable(tf.compat.v1.train.get_global_step(),
+                             'estimator_global_step')
+      # Create saver that maps variable names to object-checkpoint keys.
+      object_graph = graph_view.ObjectGraphView(model)
+      var_list = object_graph.frozen_saveable_objects()
+      saver = tf.compat.v1.train.Saver(var_list=var_list, sharded=True)
+      saver._object_restore_saver = trackable_util.frozen_saver(model)
+      scaffold = tf.compat.v1.train.Scaffold(saver=saver)
+
+    final_export_outputs = {
+        _DEFAULT_SERVING_KEY: export_lib.PredictOutput(predictions)
+    }
+    if export_outputs is not None:
+      different_keys = set(export_outputs.keys()) - set(model.output_names)
+      if different_keys:
+        raise FormattedKeyError(
+            'The dictionary passed into `export_outputs` contains keys not '
+            'defined in the keras model.'
+            '\n\tModel output keys: {model_keys}'
+            '\n\t`export_outputs` keys: {export_keys}'
+            '\n\tUnknown keys: {different_keys}'.format(
+                model_keys=set(model.output_names),
+                export_keys=set(export_outputs.keys()),
+                different_keys=different_keys))
+      for key, export_output_cls in export_outputs.items():
+        final_export_outputs[key] = export_output_cls(predictions[key])
+
+    return model_fn_lib.EstimatorSpec(
+        mode=mode,
+        predictions=predictions,
+        loss=loss,
+        train_op=train_op,
+        eval_metric_ops=eval_metric_ops,
+        export_outputs=final_export_outputs,
+        scaffold=scaffold)
+
+  return model_fn
+
+
+def _save_first_checkpoint(keras_model, custom_objects, config,
+                           save_object_ckpt):
+  """Save first checkpoint for the keras Estimator.
+
+  Args:
+    keras_model: an instance of compiled keras model.
+    custom_objects: Dictionary for custom objects.
+    config: Estimator config.
+    save_object_ckpt: Whether to save an object-based checkpoint.
+
+  Returns:
+    The path where keras model checkpoint is saved.
+  """
+  # save checkpoint into subdirectory to allow warm start
+  keras_model_dir = os.path.join(config.model_dir, 'keras')
+  # Load weights and save to checkpoint if there is no checkpoint
+  latest_path = tf.train.latest_checkpoint(keras_model_dir)
+  if not latest_path:
+    keras_weights = None
+    if _any_weight_initialized(keras_model):
+      keras_weights = keras_model.get_weights()
+    if not tf.compat.v1.gfile.IsDirectory(keras_model_dir):
+      tf.compat.v1.gfile.MakeDirs(keras_model_dir)
+    with tf.Graph().as_default():
+      tf.compat.v1.random.set_random_seed(config.tf_random_seed)
+      tf.compat.v1.train.create_global_step()
+      model = _clone_and_build_model(ModeKeys.TRAIN, keras_model,
+                                     custom_objects)
+
+      # Init the train_function outside of the context of the session. This is
+      # because the train function will update the graph by adding backprop
+      # parts, potentially updating nodes in the forward graph, which will
+      # fail if done within the same session.
+      # Always create the train_function here since the model is just cloned.
+      # See https://github.com/tensorflow/tensorflow/issues/27750 for details.
+      model._make_train_function()  # pylint: disable=protected-access
+
+      # save to checkpoint
+      with tf.compat.v1.Session(config=config.session_config) as sess:
+        if keras_weights:
+          model.set_weights(keras_weights)
+        # model._make_train_function() will potentially create the optimizer
+        # variable, which will require another variable initialization.
+        K._initialize_variables(sess)  # pylint: disable=protected-access
+
+        if save_object_ckpt:
+          model._track_trackable(  # pylint: disable=protected-access
+              tf.compat.v1.train.get_global_step(), 'estimator_global_step')
+          latest_path = os.path.join(keras_model_dir, 'keras_model.ckpt')
+          model.save_weights(latest_path)
+        else:
+          saver = tf.compat.v1.train.Saver()
+          latest_path = os.path.join(keras_model_dir, 'keras_model.ckpt')
+          saver.save(sess, latest_path)
+
+  return latest_path
+
+
+def _get_file_from_google_storage(keras_model_path, model_dir):
+  """Get file from google storage and download to local file.
+
+  Args:
+    keras_model_path: a google storage path for compiled keras model.
+    model_dir: the directory from estimator config.
+
+  Returns:
+    The path where keras model is saved.
+
+  Raises:
+    ValueError: if storage object name does not end with .h5.
+  """
+  try:
+    from google.cloud import storage  # pylint:disable=g-import-not-at-top
+  except ImportError:
+    raise TypeError('Could not load model from Google Cloud Storage; please '
+                    'install `google-cloud-storage` via '
+                    '`pip install google-cloud-storage`.')
+  storage_client = storage.Client()
+  path, blob_name = os.path.split(keras_model_path)
+  _, bucket_name = os.path.split(path)
+  keras_model_dir = os.path.join(model_dir, 'keras')
+  if not tf.compat.v1.gfile.Exists(keras_model_dir):
+    tf.compat.v1.gfile.MakeDirs(keras_model_dir)
+  file_name = os.path.join(keras_model_dir, 'keras_model.h5')
+  try:
+    blob = storage_client.get_bucket(bucket_name).blob(blob_name)
+    blob.download_to_filename(file_name)
+  except Exception:
+    raise ValueError('Failed to download keras model, please check '
+                     'environment variable GOOGLE_APPLICATION_CREDENTIALS '
+                     'and model path storage.googleapis.com/{bucket}/{object}.')
+  tf.compat.v1.logging.info('Saving model to {}'.format(file_name))
+  del storage_client
+  return file_name
+
+
+# LINT.IfChange
+# TODO(b/139699640): let model_to_estimator only rely on public Keras APIs.
+def model_to_estimator(keras_model=None,
+                       keras_model_path=None,
+                       custom_objects=None,
+                       model_dir=None,
+                       config=None,
+                       checkpoint_format=None,
+                       use_v2_estimator=False,
+                       metric_names_map=None,
+                       export_outputs=None):
+  # LINT.ThenChange(//tensorflow/python/keras/estimator/__init__.py)
+  """Constructs an `Estimator` instance from given keras model.
+
+  If you use infrastructure or other tooling that relies on Estimators, you can
+  still build a Keras model and use model_to_estimator to convert the Keras
+  model to an Estimator for use with downstream systems.
+
+  For usage example, please see:
+  [Creating estimators from Keras
+  Models](https://www.tensorflow.org/guide/estimator#create_an_estimator_from_a_keras_model).
+
+  Sample Weights:
+  Estimators returned by `model_to_estimator` are configured so that they can
+  handle sample weights (similar to `keras_model.fit(x, y, sample_weights)`).
+
+  To pass sample weights when training or evaluating the Estimator, the first
+  item returned by the input function should be a dictionary with keys
+  `features` and `sample_weights`. Example below:
+
+  ```python
+  keras_model = tf.keras.Model(...)
+  keras_model.compile(...)
+
+  estimator = tf.keras.estimator.model_to_estimator(keras_model)
+
+  def input_fn():
+    return dataset_ops.Dataset.from_tensors(
+        ({'features': features, 'sample_weights': sample_weights},
+         targets))
+
+  estimator.train(input_fn, steps=1)
+  ```
+
+  Example with customized export signature:
+  ```python
+  inputs = {'a': tf.keras.Input(..., name='a'),
+            'b': tf.keras.Input(..., name='b')}
+  outputs = {'c': tf.keras.layers.Dense(..., name='c')(inputs['a']),
+             'd': tf.keras.layers.Dense(..., name='d')(inputs['b'])}
+  keras_model = tf.keras.Model(inputs, outputs)
+  keras_model.compile(...)
+  export_outputs = {'c': tf.estimator.export.RegressionOutput,
+                    'd': tf.estimator.export.ClassificationOutput}
+
+  estimator = tf.keras.estimator.model_to_estimator(
+      keras_model, export_outputs=export_outputs)
+
+  def input_fn():
+    return dataset_ops.Dataset.from_tensors(
+        ({'features': features, 'sample_weights': sample_weights},
+         targets))
+
+  estimator.train(input_fn, steps=1)
+  ```
+
+  Note: We do not support creating weighted metrics in Keras and converting them
+  to weighted metrics in the Estimator API using `model_to_estimator`.
+  You will have to create these metrics directly on the estimator spec using the
+  `add_metrics` function.
+
+  Args:
+    keras_model: A compiled Keras model object. This argument is mutually
+      exclusive with `keras_model_path`. Estimator's `model_fn` uses the
+      structure of the model to clone the model. Defaults to `None`.
+    keras_model_path: Path to a compiled Keras model saved on disk, in HDF5
+      format, which can be generated with the `save()` method of a Keras model.
+      This argument is mutually exclusive with `keras_model`.
+      Defaults to `None`.
+    custom_objects: Dictionary for cloning customized objects. This is
+      used with classes that are not part of this pip package. For example, if
+      user maintains a `relu6` class that inherits from `tf.keras.layers.Layer`,
+      then pass `custom_objects={'relu6': relu6}`. Defaults to `None`.
+    model_dir: Directory to save `Estimator` model parameters, graph, summary
+      files for TensorBoard, etc. If unset, a directory will be created with
+      `tempfile.mkdtemp`.
+    config: `RunConfig` to config `Estimator`. Allows setting up things in
+      `model_fn` based on configuration such as `num_ps_replicas`, or
+      `model_dir`. Defaults to `None`. If both `config.model_dir` and the
+      `model_dir` argument (above) are specified the `model_dir` **argument**
+      takes precedence.
+    checkpoint_format: Sets the format of the checkpoint saved by the estimator
+      when training. May be `saver` or `checkpoint`, depending on whether to
+      save checkpoints from `tf.compat.v1.train.Saver` or `tf.train.Checkpoint`.
+      The default is `checkpoint`. Estimators use name-based `tf.train.Saver`
+      checkpoints, while Keras models use object-based checkpoints from
+      `tf.train.Checkpoint`. Currently, saving object-based checkpoints from
+      `model_to_estimator` is only supported by Functional and Sequential
+      models.
+    use_v2_estimator: Whether to convert the model to a V2 Estimator or V1
+      Estimator. Defaults to `False`.
+    metric_names_map: Optional dictionary mapping Keras model output metric
+      names to custom names. This can be used to override the default Keras
+      model output metrics names in a multi IO model use case and provide custom
+      names for the `eval_metric_ops` in Estimator.
+      The Keras model metric names can be obtained using `model.metrics_names`
+      excluding any loss metrics such as total loss and output losses.
+      For example, if your Keras model has two outputs `out_1` and `out_2`,
+      with `mse` loss and `acc` metric, then `model.metrics_names` will be
+      `['loss', 'out_1_loss', 'out_2_loss', 'out_1_acc', 'out_2_acc']`.
+      The model metric names excluding the loss metrics will be
+      `['out_1_acc', 'out_2_acc']`.
+    export_outputs: Optional dictionary. This can be used to override the
+      default Keras model output exports in a multi-IO model use case and
+      provide custom names for the `export_outputs` in
+      `tf.estimator.EstimatorSpec`. Default is `None`, which is equivalent to
+      `{'serving_default': tf.estimator.export.PredictOutput}`.
+      A dict `{name: output}` where:
+        * name: An arbitrary name for this output. This becomes the signature
+          name in the SavedModel.
+        * output: an `ExportOutput` object such as `ClassificationOutput`,
+          `RegressionOutput`, or `PredictOutput`. Single-headed models only need
+          to specify one entry in this dictionary. Multi-headed models should
+          specify one entry for each head, one of which must be named using
+          `tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY`.
+          If no entry is provided, a default `PredictOutput` mapping to
+          `predictions` will be created.
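+
+  For example, a sketch of a `metric_names_map` for the two-output case
+  described in the `metric_names_map` argument above (the custom names are
+  illustrative):
+
+  ```python
+  metric_names_map = {
+      'out_1_acc': 'acc/out_1',
+      'out_2_acc': 'acc/out_2',
+  }
+  ```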
+
+  Returns:
+    An Estimator from the given Keras model.
+
+  Raises:
+    ValueError: If neither keras_model nor keras_model_path was given.
+    ValueError: If both keras_model and keras_model_path were given.
+    ValueError: If the keras_model_path is a GCS URI.
+    ValueError: If keras_model has not been compiled.
+    ValueError: If an invalid checkpoint_format was given.
+  """
+
+  if not (keras_model or keras_model_path):
+    raise ValueError(
+        'Either `keras_model` or `keras_model_path` needs to be provided.')
+  if keras_model and keras_model_path:
+    raise ValueError(
+        'Please specify either `keras_model` or `keras_model_path`, '
+        'but not both.')
+
+  if keras_model:
+    _assert_valid_model(keras_model, custom_objects)
+
+  config = estimator_lib.maybe_overwrite_model_dir_and_session_config(
+      config, model_dir)
+  if not keras_model:
+    if keras_model_path.startswith(
+        'gs://') or 'storage.googleapis.com' in keras_model_path:
+      keras_model_path = _get_file_from_google_storage(keras_model_path,
+                                                       config.model_dir)
+    tf.compat.v1.logging.info('Loading models from %s', keras_model_path)
+    keras_model = models.load_model(keras_model_path)
+  else:
+    tf.compat.v1.logging.info('Using the Keras model provided.')
+
+  if checkpoint_format is None or checkpoint_format == 'checkpoint':
+    if not (keras_model._is_graph_network or
+            isinstance(keras_model, models.Sequential)):
+      raise ValueError('Object-based checkpoints are currently not supported '
+                       'with subclassed models.')
+    save_object_ckpt = True
+  elif checkpoint_format == 'saver':
+    save_object_ckpt = False
+  else:
+    raise ValueError(
+        'Checkpoint format must be one of "checkpoint" or "saver". Got {}'
+        .format(checkpoint_format))
+
+  if not hasattr(keras_model, 'optimizer') or not keras_model.optimizer:
+    raise ValueError('The given keras model has not been compiled yet. '
+                     'Please compile the model with `model.compile()` '
+                     'before calling `model_to_estimator()`.')
+
+  keras_model_fn = _create_keras_model_fn(
+      keras_model, custom_objects, save_object_ckpt, metric_names_map,
+      export_outputs)
+  if _any_weight_initialized(keras_model):
+    # Warn if config passed to estimator tries to update GPUOptions. If a
+    # session has already been created, the GPUOptions passed to the first
+    # session sticks.
+    if config.session_config.HasField('gpu_options'):
+      tf.compat.v1.logging.warn(
+          'The Keras backend session has already been set. '
+          'The _session_config passed to model_to_estimator will not be used.')
+  else:
+    # Pass the config into keras backend's default session.
+    sess = tf.compat.v1.Session(config=config.session_config)
+    K.set_session(sess)
+
+  warm_start_path = None
+  if keras_model._is_graph_network and config.is_chief:
+    warm_start_path = _save_first_checkpoint(keras_model, custom_objects,
+                                             config, save_object_ckpt)
+  elif keras_model.built:
+    tf.compat.v1.logging.warn(
+        'You are creating an Estimator from a Keras model manually '
+        'subclassed from `Model`, that was already called on some '
+        'inputs (and thus already had weights). We are currently '
+        'unable to preserve the model\'s state (its weights) as '
+        'part of the estimator in this case. Be warned that the '
+        'estimator has been created using a freshly initialized '
+        'version of your model.\n'
+        'Note that this doesn\'t affect the state of the model '
+        'instance you passed as `keras_model` argument.')
+  if use_v2_estimator:
+    estimator_cls = estimator_lib.EstimatorV2
+  else:
+    estimator_cls = estimator_lib.Estimator
+
+  estimator = estimator_cls(
+      keras_model_fn, config=config, warm_start_from=warm_start_path)
+
+  return estimator
+
+
+def _assert_valid_model(model, custom_objects=None):
+  is_subclass = (not model._is_graph_network and
+                 not isinstance(model, models.Sequential))
+  if is_subclass:
+    try:
+      custom_objects = custom_objects or {}
+      with tf.keras.utils.CustomObjectScope(custom_objects):
+        model.__class__.from_config(model.get_config())
+    except NotImplementedError:
+      raise ValueError(
+          'Subclassed `Model`s passed to `model_to_estimator` must '
+          'implement `Model.get_config` and `Model.from_config`.')
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/mode_keys.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/mode_keys.py
new file mode 100644
index 00000000..06c76199
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/mode_keys.py
@@ -0,0 +1,24 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Exporting ModeKeys to tf.estimator namespace."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from tensorflow.python.saved_model.model_utils.mode_keys import EstimatorModeKeys as ModeKeys
+from tensorflow.python.util.tf_export import estimator_export
+
+estimator_export('estimator.ModeKeys')(ModeKeys)
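+
+# For reference, the exported mode values are ModeKeys.TRAIN == 'train',
+# ModeKeys.EVAL == 'eval', and ModeKeys.PREDICT == 'infer'.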
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/model_fn.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/model_fn.py
new file mode 100644
index 00000000..6616cebc
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/model_fn.py
@@ -0,0 +1,633 @@
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Classes and methods related to model_fn."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+
+import six
+import tensorflow as tf
+from tensorflow.python.framework import ops
+from tensorflow.python.keras.metrics import Metric
+from tensorflow.python.saved_model import model_utils as export_utils
+from tensorflow.python.tpu import tensor_tracer
+from tensorflow.python.types import core
+from tensorflow.python.util import function_utils
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator.mode_keys import ModeKeys
+
+LOSS_METRIC_KEY = 'loss'
+AVERAGE_LOSS_METRIC_KEY = 'average_loss'
+
+
+@estimator_export('estimator.EstimatorSpec')
+class EstimatorSpec(
+    collections.namedtuple('EstimatorSpec', [
+        'mode', 'predictions', 'loss', 'train_op', 'eval_metric_ops',
+        'export_outputs', 'training_chief_hooks', 'training_hooks', 'scaffold',
+        'evaluation_hooks', 'prediction_hooks'
+    ])):
+  """Ops and objects returned from a `model_fn` and passed to an `Estimator`.
+
+  `EstimatorSpec` fully defines the model to be run by an `Estimator`.
+  """
+
+  def __new__(cls,
+              mode,
+              predictions=None,
+              loss=None,
+              train_op=None,
+              eval_metric_ops=None,
+              export_outputs=None,
+              training_chief_hooks=None,
+              training_hooks=None,
+              scaffold=None,
+              evaluation_hooks=None,
+              prediction_hooks=None):
+    """Creates a validated `EstimatorSpec` instance.
+
+    Depending on the value of `mode`, different arguments are required, namely:
+
+    * For `mode == ModeKeys.TRAIN`: required fields are `loss` and `train_op`.
+    * For `mode == ModeKeys.EVAL`: required field is `loss`.
+    * For `mode == ModeKeys.PREDICT`: required field is `predictions`.
+
+    model_fn can populate all arguments independent of mode. In this case, some
+    arguments will be ignored by an `Estimator`. E.g. `train_op` will be
+    ignored in eval and infer modes. Example:
+
+    ```python
+    def my_model_fn(features, labels, mode):
+      predictions = ...
+      loss = ...
+      train_op = ...
+      return tf.estimator.EstimatorSpec(
+          mode=mode,
+          predictions=predictions,
+          loss=loss,
+          train_op=train_op)
+    ```
+
+    Alternatively, model_fn can just populate the arguments appropriate to the
+    given mode. Example:
+
+    ```python
+    def my_model_fn(features, labels, mode):
+      if (mode == tf.estimator.ModeKeys.TRAIN or
+          mode == tf.estimator.ModeKeys.EVAL):
+        loss = ...
+      else:
+        loss = None
+      if mode == tf.estimator.ModeKeys.TRAIN:
+        train_op = ...
+      else:
+        train_op = None
+      if mode == tf.estimator.ModeKeys.PREDICT:
+        predictions = ...
+      else:
+        predictions = None
+
+      return tf.estimator.EstimatorSpec(
+          mode=mode,
+          predictions=predictions,
+          loss=loss,
+          train_op=train_op)
+    ```
+
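+    `eval_metric_ops` accepts either `tf.keras.metrics.Metric` instances or
+    `(metric_tensor, update_op)` tuples. A minimal sketch, assuming `labels`
+    and `predictions` are tensors and `loss` is defined as above:
+
+    ```python
+    accuracy = tf.compat.v1.metrics.accuracy(labels, predictions)
+    mean_loss = tf.keras.metrics.Mean(name='mean_loss')
+    mean_loss.update_state(loss)
+    eval_metric_ops = {'accuracy': accuracy, 'mean_loss': mean_loss}
+    ```
+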
+    Args:
+      mode: A `ModeKeys`. Specifies if this is training, evaluation or
+        prediction.
+      predictions: Predictions `Tensor` or dict of `Tensor`.
+      loss: Training loss `Tensor`. Must be either scalar, or with shape `[1]`.
+      train_op: Op for the training step.
+      eval_metric_ops: Dict of metric results keyed by name.
+        The values of the dict can be one of the following: (1) an instance of
+          the `Metric` class. (2) Results of calling a metric function, namely
+          a `(metric_tensor, update_op)` tuple. `metric_tensor` should be
+          evaluated without any impact on state (typically it is a pure
+          computation based on variables). For example, it should not trigger
+          the `update_op` or require any input fetching.
+      export_outputs: Describes the output signatures to be exported to
+        `SavedModel` and used during serving.
+        A dict `{name: output}` where:
+        * name: An arbitrary name for this output.
+        * output: an `ExportOutput` object such as `ClassificationOutput`,
+          `RegressionOutput`, or `PredictOutput`. Single-headed models only need
+          to specify one entry in this dictionary. Multi-headed models should
+          specify one entry for each head, one of which must be named using
+          `tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY`.
+          If no entry is provided, a default `PredictOutput` mapping to
+          `predictions` will be created.
+      training_chief_hooks: Iterable of `tf.train.SessionRunHook` objects to run
+        on the chief worker during training.
+      training_hooks: Iterable of `tf.train.SessionRunHook` objects to run on
+        all workers during training.
+      scaffold: A `tf.train.Scaffold` object that can be used to set
+        initialization, saver, and more to be used in training.
+      evaluation_hooks: Iterable of `tf.train.SessionRunHook` objects to run
+        during evaluation.
+      prediction_hooks: Iterable of `tf.train.SessionRunHook` objects to run
+        during predictions.
+
+    Returns:
+      A validated `EstimatorSpec` object.
+
+    Raises:
+      ValueError: If validation fails.
+      TypeError: If any of the arguments is not the expected type.
+    """
+    train_op = _validate_estimator_spec_train_op(train_op, mode)
+    loss = _validate_estimator_spec_loss(loss, mode)
+    predictions = _validate_estimator_spec_predictions(predictions, mode)
+    export_outputs = _validate_estimator_spec_export_outputs(
+        export_outputs, predictions, mode)
+    training_hooks = _validate_estimator_spec_hooks(training_hooks)
+    evaluation_hooks = _validate_estimator_spec_hooks(evaluation_hooks)
+    prediction_hooks = _validate_estimator_spec_hooks(prediction_hooks)
+    training_chief_hooks = _validate_estimator_spec_hooks(training_chief_hooks)
+    eval_metric_ops = _validate_eval_metric_ops(eval_metric_ops)
+    scaffold = _validate_scaffold(scaffold)
+
+    # By default, Tensor Tracer is not enabled and the block below is a no-op.
+    if tensor_tracer.TensorTracer.is_enabled() and train_op is not None:
+      # If Tensor Tracer is enabled via environment flags, loss and train_op
+      # will be used to determine the execution path that will be traced. A
+      # `tf.identity` of loss that enforces the execution of tracing ops will be
+      # returned.
+      tt = tensor_tracer.TensorTracer()
+      loss = tt.trace_cpu(tf.compat.v1.get_default_graph(), loss, train_op)
+
+    return super(EstimatorSpec, cls).__new__(
+        cls,
+        mode=mode,
+        predictions=predictions,
+        loss=loss,
+        train_op=train_op,
+        eval_metric_ops=eval_metric_ops,
+        export_outputs=export_outputs,
+        training_chief_hooks=training_chief_hooks,
+        training_hooks=training_hooks,
+        scaffold=scaffold,
+        evaluation_hooks=evaluation_hooks,
+        prediction_hooks=prediction_hooks)
+
+  def _replace(self, **kwds):
+    """Return a new EstimatorSpec replacing specified fields with new values."""
+    if 'mode' in kwds:
+      if self.mode != kwds['mode']:
+        raise ValueError('mode of EstimatorSpec cannot be changed.')
+    new_fields = map(kwds.pop, self._fields, list(self))
+    return EstimatorSpec(*new_fields)
+
+
+class _TPUEstimatorSpec(
+    collections.namedtuple('TPUEstimatorSpec', [
+        'mode', 'predictions', 'loss', 'train_op', 'eval_metrics',
+        'export_outputs', 'scaffold_fn', 'host_call', 'training_hooks',
+        'evaluation_hooks', 'prediction_hooks'
+    ])):
+  """Ops and objects returned from a `model_fn` and passed to `TPUEstimator`.
+
+  This is a simplified implementation of `tf.contrib.tpu.EstimatorSpec`. See
+  tensorflow/contrib/tpu/python/tpu/tpu_estimator.py for more detailed
+  documentation.
+  """
+
+  def __new__(cls,
+              mode,
+              predictions=None,
+              loss=None,
+              train_op=None,
+              eval_metrics=None,
+              export_outputs=None,
+              scaffold_fn=None,
+              host_call=None,
+              training_hooks=None,
+              evaluation_hooks=None,
+              prediction_hooks=None):
+    """Creates a `_TPUEstimatorSpec` instance."""
+    train_op = _validate_estimator_spec_train_op(train_op, mode)
+    loss = _validate_estimator_spec_loss(loss, mode)
+    predictions = _validate_estimator_spec_predictions(predictions, mode)
+    export_outputs = _validate_estimator_spec_export_outputs(
+        export_outputs, predictions, mode)
+    training_hooks = _validate_estimator_spec_hooks(training_hooks)
+    evaluation_hooks = _validate_estimator_spec_hooks(evaluation_hooks)
+    prediction_hooks = _validate_estimator_spec_hooks(prediction_hooks)
+    return super(_TPUEstimatorSpec, cls).__new__(
+        cls,
+        mode=mode,
+        predictions=predictions,
+        loss=loss,
+        train_op=train_op,
+        eval_metrics=eval_metrics,
+        export_outputs=export_outputs,
+        scaffold_fn=scaffold_fn,
+        host_call=host_call,
+        training_hooks=training_hooks,
+        evaluation_hooks=evaluation_hooks,
+        prediction_hooks=prediction_hooks)
+
+  def as_estimator_spec(self):
+    """Creates an equivalent `EstimatorSpec` used by CPU train/eval."""
+    if not self.eval_metrics:
+      eval_metric_ops = None
+    else:
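+      # `eval_metrics` is a `(metric_fn, tensors)` pair; calling `metric_fn`
+      # on the unpacked tensors yields standard `eval_metric_ops`.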
+      metric_fn, tensors = self.eval_metrics
+      eval_metric_ops = metric_fn(**tensors)
+    return EstimatorSpec(
+        mode=self.mode,
+        predictions=self.predictions,
+        loss=self.loss,
+        train_op=self.train_op,
+        eval_metric_ops=eval_metric_ops,
+        export_outputs=self.export_outputs,
+        training_hooks=self.training_hooks,
+        evaluation_hooks=self.evaluation_hooks,
+        prediction_hooks=self.prediction_hooks)
+
+
+# Used to generate possible error causes if the user provides a `Tensor` to an
+# EstimatorSpec that is not in the default graph.
+_default_graph_error_message_template = (
+    '{0} with "{1}" must be from the default graph. '
+    'Possible causes of this error include: \n\n'
+    '1) {0} was created outside the context of the default graph.'
+    '\n\n'
+    '2) The object passed through to EstimatorSpec was not created '
+    'in the most recent call to "model_fn".')
+
+
+def _validate_estimator_spec_train_op(train_op, mode):
+  """Validate train_op inputs for EstimatorSpec or TPUEstimatorSpec.
+
+  Args:
+    train_op: Op for the training step.
+    mode: A `ModeKeys`. Used to determine whether the train_op is acceptable for
+      use in the current mode; for example, if we are not training, this can be
+      None.
+
+  Returns:
+    train_op: Op for the training step.
+
+  Raises:
+    ValueError: If:
+                - no train_op is passed during training.
+                - train_op is not part of the default graph.
+    TypeError:  If train_op is neither a `Tensor` nor an Op.
+  """
+  if train_op is None:
+    if mode == ModeKeys.TRAIN:
+      raise ValueError('Missing train_op.')
+  else:
+    default_graph = tf.compat.v1.get_default_graph()
+    _check_is_tensor_or_operation(train_op, 'train_op')
+    if isinstance(train_op, tf.Variable):
+      train_op = train_op.op
+    if not (tf.executing_eagerly() or train_op.graph is default_graph):
+      raise ValueError(
+          _default_graph_error_message_template.format('train_op',
+                                                       train_op.name))
+  return train_op
+
+
+def _validate_estimator_spec_loss(loss, mode):
+  """Validate loss inputs for EstimatorSpec or TPUEstimatorSpec.
+
+  Args:
+    loss: Training loss `Tensor`. Must either be scalar, or with shape `[1]`.
+    mode: A `ModeKeys`. Used to determine whether the loss is acceptable for use
+      in the current mode; for example, None is acceptable if we are not
+      training or evaluating.
+
+  Returns:
+    loss: Training loss `Tensor`.
+
+  Raises:
+    ValueError: If:
+                - the loss `Tensor` is not appropriately formatted.
+                - the loss `Tensor` is not part of the default graph.
+    TypeError:  If a non-`Tensor`, non-None input is passed.
+  """
+  if loss is None:
+    if mode in (ModeKeys.TRAIN, ModeKeys.EVAL):
+      raise ValueError('Missing loss.')
+  else:
+    default_graph = tf.compat.v1.get_default_graph()
+    # Loss must be a tensor.
+    loss = _check_is_tensor(loss, 'loss')
+    loss_shape = loss.get_shape()
+    if loss_shape.num_elements() not in (None, 1):
+      raise ValueError('Loss must be scalar, given: {}'.format(loss))
+    if not loss_shape.is_compatible_with(tf.TensorShape([])):
+      loss = tf.reshape(loss, [])
+    if not (tf.executing_eagerly() or loss.graph is default_graph):
+      raise ValueError(
+          _default_graph_error_message_template.format('loss', loss.name))
+  return loss
+
+
+def _validate_estimator_spec_predictions(predictions, mode):
+  """Validate predictions inputs for EstimatorSpec or TPUEstimatorSpec.
+
+  Args:
+    predictions: Predictions `Tensor` or dict of `Tensor`.
+    mode: A `ModeKeys`. Used to determine whether the predictions are acceptable
+      for use in the current mode; None is acceptable if we are not making
+      predictions.
+
+  Returns:
+    predictions: Predictions `Tensor` or dict of `Tensor`.
+
+  Raises:
+    ValueError: If:
+      - predictions is None and we are in predict mode.
+      - predictions `Tensor` is not in default_graph or else it is a dict of
+        `Tensor` where at least one is not in default_graph.
+    TypeError:  If predictions is not a `Tensor` or dict of `Tensor`.
+  """
+  if predictions is None:
+    if mode == ModeKeys.PREDICT:
+      raise ValueError('Missing predictions.')
+    predictions = {}
+  else:
+    default_graph = tf.compat.v1.get_default_graph()
+    if isinstance(predictions, dict):
+      predictions = {
+          k: _check_is_tensor(v, 'predictions[{}]'.format(k))
+          for k, v in six.iteritems(predictions)
+      }
+      if not tf.executing_eagerly():
+        for key, value in six.iteritems(predictions):
+          if value.graph is not default_graph:
+            raise ValueError(
+                _default_graph_error_message_template.format(
+                    'prediction values', '{0}: {1}'.format(key, value.name)))
+    else:
+      # Predictions should be a tensor.
+      predictions = _check_is_tensor(predictions, 'predictions')
+      if not (tf.executing_eagerly() or predictions.graph is default_graph):
+        raise ValueError(
+            _default_graph_error_message_template.format(
+                'prediction values', predictions.name))
+  return predictions
+
+
+def _validate_estimator_spec_export_outputs(export_outputs, predictions, mode):
+  """Validate export_outputs inputs for EstimatorSpec or TPUEstimatorSpec.
+
+  Args:
+    export_outputs: Describes the output signatures to be exported to
+      `SavedModel` and used during serving.
+      A dict `{name: output}` where:
+      * name: An arbitrary name for this output.
+      * output: an `ExportOutput` object such as `ClassificationOutput`,
+        `RegressionOutput`, or `PredictOutput`. Single-headed models only need
+        to specify one entry in this dictionary. Multi-headed models should
+        specify one entry for each head, one of which must be named using
+        `tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY`.
+        If no entry is provided, a default `PredictOutput` mapping to
+        predictions will be created (see the sketch after this argument list).
+    predictions: Predictions `Tensor` or dict of `Tensor`. Used in generation of
+      default outputs.
+    mode: A `ModeKeys`. Used to determine whether to validate at all; if the
+      EstimatorSpec is not for making predictions we can skip validation.
+
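+  For example, a single-head default signature could look like this sketch
+  (assuming `predictions` is the dict produced by the model):
+
+  ```python
+  export_outputs = {
+      'serving_default': tf.estimator.export.PredictOutput(predictions)
+  }
+  ```
+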
+  Returns:
+    export_outputs: The validated export_outputs dict, possibly populated with
+      a default `PredictOutput`.
+
+  Raises:
+    ValueError: If validation fails.
+    TypeError: If the export_outputs is not a dict or the values of the dict
+               are not instances of type `ExportOutput`.
+  """
+  if mode == ModeKeys.PREDICT:
+    export_outputs = export_utils.get_export_outputs(export_outputs,
+                                                     predictions)
+  return export_outputs
+
+
+def _validate_estimator_spec_hooks(hooks):
+  """Validate SessionRunHooks for use in EstimatorSpec or TPUEstimatorSpec.
+
+  Args:
+    hooks: Iterable of `tf.train.SessionRunHook` objects to run on all workers.
+
+  Returns:
+    hooks: Iterable of `tf.train.SessionRunHook` objects.
+
+  Raises:
+    ValueError: If validation fails.
+    TypeError:  If any element of the iterable is not a SessionRunHook.
+  """
+  hooks = tuple(hooks or [])
+
+  for hook in hooks:
+    if not isinstance(hook, tf.compat.v1.train.SessionRunHook):
+      raise TypeError(
+          'All hooks must be SessionRunHook instances, given: {}'.format(hook))
+  return hooks
+
+
+def _validate_eval_metric_ops(eval_metric_ops):
+  """Validate eval_metric_ops for use in EstimatorSpec.
+
+  Args:
+    eval_metric_ops: Dict of metric results keyed by name.
+      The values of the dict can be one of the following: (1) instance of
+        `Metric` class. (2) Results of calling a metric_function, namely a
+        `(metric_tensor, update_op)` tuple. `metric_tensor` should be evaluated
+        without any impact on state (typically it is a pure computation based on
+        variables). For example, it should not trigger the `update_op` or
+        require any input fetching.
+
+  Returns:
+    eval_metric_ops: Dict of metric results keyed by name.
+
+  Raises:
+    ValueError:  If:
+     - one of the eval_metric_ops `Metric` objects has no updates.
+     - there is at least one `Metric` update or result, `Tensor`, or Op that is
+       not in the default graph.
+    TypeError:   If:
+     - eval_metric_ops is not a dict or None.
+     - an element of eval_metric_ops is not a `Metric` or a 2-tuple.
+     - an element of eval_metric_ops has a sub-element that is not a `Tensor` or
+       an Op.
+  """
+  if eval_metric_ops is None:
+    eval_metric_ops = {}
+  else:
+    if not isinstance(eval_metric_ops, dict):
+      raise TypeError(
+          'eval_metric_ops must be a dict, given: {}'.format(eval_metric_ops))
+    for key, value in six.iteritems(eval_metric_ops):
+      # TODO(psv): When we deprecate the old metrics, throw an error here if
+      # the value is not an instance of `Metric` class.
+      if isinstance(value, Metric):
+        if not value.updates:  # Check if metric updates are available.
+          raise ValueError(
+              'Please call update_state(...) on the "{metric_name}" metric'
+              .format(metric_name=value.name))
+      else:
+        if not isinstance(value, tuple) or len(value) != 2:
+          raise TypeError(
+              'Values of eval_metric_ops must be (metric_value, update_op) '
+              'tuples, given: {} for key: {}'.format(value, key))
+  # Verify all tensors and ops are from default graph.
+  default_graph = tf.compat.v1.get_default_graph()
+  for key, value in list(six.iteritems(eval_metric_ops)):
+    if isinstance(value, Metric):
+      values_to_check = value.updates[:]
+      values_to_check.append(value.result())
+    else:
+      values_to_check = tf.nest.flatten(value)
+    for val in values_to_check:
+      if not (tf.executing_eagerly() or val.graph is default_graph):
+        raise ValueError(
+            _default_graph_error_message_template.format(
+                'eval_metric_ops', '{0}: {1}'.format(key, val.name)))
+  # Metric variables are by default not added to any collections. The variables
+  # are appended to the LOCAL_VARIABLES collection for initialization, and
+  # METRIC_VARIABLES for TFMA compatibility. Note that although collections are
+  # officially deprecated in TensorFlow 2, Estimators will continue using
+  # collections as long as they support V1 graph mode.
+  vars_to_add = set()
+  for key, value in six.iteritems(eval_metric_ops):
+    if isinstance(value, Metric):
+      vars_to_add.update(value.variables)
+      # Convert Metric instances to (value_tensor, update_op) tuple.
+      eval_metric_ops[key] = (value.result(), value.updates[0])
+  _update_variable_collection(tf.compat.v1.GraphKeys.LOCAL_VARIABLES,
+                              vars_to_add)
+  _update_variable_collection(tf.compat.v1.GraphKeys.METRIC_VARIABLES,
+                              vars_to_add)
+
+  return eval_metric_ops
+
+
+def _update_variable_collection(collection_name, vars_to_add):
+  """Add variables to collection."""
+  collection = set(tf.compat.v1.get_collection(collection_name))
+  # Skip variables that are in the collection already.
+  vars_to_add = vars_to_add.difference(collection)
+  for v in vars_to_add:
+    tf.compat.v1.add_to_collection(collection_name, v)
+
+
+def _validate_scaffold(scaffold):
+  """Validate scaffold input for EstimatorSpec.
+
+  Args:
+    scaffold: A `tf.train.Scaffold` object that can be used to set
+      initialization, saver, and more to be used in training.
+
+  Returns:
+    scaffold: A `tf.train.Scaffold` object. If no scaffold is provided, then a
+      default is generated.
+
+  Raises:
+    TypeError: If the scaffold is not of type `monitored_session.Scaffold`
+      or None.
+  """
+  scaffold = scaffold or tf.compat.v1.train.Scaffold()
+  if not isinstance(scaffold, tf.compat.v1.train.Scaffold):
+    raise TypeError(
+        'scaffold must be tf.train.Scaffold. Given: {}'.format(scaffold))
+  return scaffold
+
+
+def _check_is_tensor_or_operation(x, name):
+  # TODO(b/154650521): Use tf.Tensor instead of core.Tensor.
+  if not isinstance(x, (tf.Operation, core.Tensor)):
+    raise TypeError('{} must be Operation or Tensor, given: {}'.format(name, x))
+
+
+def _check_is_tensor(x, tensor_name):
+  """Returns `x` if it is a `Tensor`, raises TypeError otherwise."""
+  if not isinstance(x, core.Tensor):
+    raise TypeError('{} must be Tensor, given: {}'.format(tensor_name, x))
+  return x
+
+
+@estimator_export('estimator.experimental.call_logit_fn')
+def call_logit_fn(logit_fn, features, mode, params, config):
+  """Calls logit_fn (experimental).
+
+  THIS FUNCTION IS EXPERIMENTAL. Keras layers/models are the recommended APIs
+  for logit and model composition.
+
+  A utility function that calls the provided logit_fn with the relevant subset
+  of provided arguments. Similar to tf.estimator._call_model_fn().
+
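+  A minimal sketch of a compatible `logit_fn` (the feature key `'x'` and the
+  params key `'n_classes'` are illustrative):
+
+  ```python
+  def logit_fn(features, mode, params):
+    return tf.compat.v1.layers.dense(features['x'], units=params['n_classes'])
+  ```
+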
+  Args:
+    logit_fn: A logit_fn as defined above.
+    features: The features dict.
+    mode: TRAIN / EVAL / PREDICT ModeKeys.
+    params: The hyperparameter dict.
+    config: The configuration object.
+
+  Returns:
+    A logit Tensor, the output of logit_fn.
+
+  Raises:
+    ValueError: if logit_fn does not return a Tensor or a dictionary mapping
+      strings to Tensors.
+  """
+  logit_fn_args = function_utils.fn_args(logit_fn)
+  kwargs = {}
+  if 'mode' in logit_fn_args:
+    kwargs['mode'] = mode
+  if 'params' in logit_fn_args:
+    kwargs['params'] = params
+  if 'config' in logit_fn_args:
+    kwargs['config'] = config
+  logit_fn_results = logit_fn(features=features, **kwargs)
+
+  result_is_valid_dictionary = (
+      isinstance(logit_fn_results, dict) and
+      all([(isinstance(k, six.string_types) and isinstance(v, tf.Tensor))
+           for k, v in six.iteritems(logit_fn_results)]))
+  result_is_tensor = isinstance(logit_fn_results, tf.Tensor)
+
+  if not (result_is_valid_dictionary or result_is_tensor):
+    raise ValueError('logit_fn should return a Tensor or a dictionary mapping '
+                     'strings to Tensors.  logit_fn returned: %s' %
+                     logit_fn_results)
+
+  return logit_fn_results
+
+
+_VALID_MODEL_FN_ARGS = set(
+    ['features', 'labels', 'mode', 'params', 'self', 'config'])
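+
+# For example, a `model_fn` signature that passes `verify_model_fn_args`
+# (illustrative):
+#
+#   def model_fn(features, labels, mode, params, config):
+#     ...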
+
+
+def verify_model_fn_args(model_fn, params):
+  """Verifies `model_fn` arguments."""
+  args = set(function_utils.fn_args(model_fn))
+  if 'features' not in args:
+    raise ValueError('model_fn (%s) must include features argument.' % model_fn)
+  if params is not None and 'params' not in args:
+    raise ValueError('model_fn (%s) does not include params argument, '
+                     'but params (%s) is passed to Estimator.' %
+                     (model_fn, params))
+  if params is None and 'params' in args:
+    tf.compat.v1.logging.warn(
+        'Estimator\'s model_fn (%s) includes params '
+        'argument, but params are not passed to Estimator.', model_fn)
+  non_valid_args = list(args - _VALID_MODEL_FN_ARGS)
+  if non_valid_args:
+    raise ValueError('model_fn (%s) has the following unexpected args: %s' %
+                     (model_fn, non_valid_args))
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/run_config.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/run_config.py
new file mode 100644
index 00000000..8631473a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/run_config.py
@@ -0,0 +1,990 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Environment configuration object for Estimators."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import copy
+import json
+import os
+
+import six
+import tensorflow as tf
+from tensorflow.core.protobuf import config_pb2
+from tensorflow.core.protobuf import rewriter_config_pb2
+from tensorflow.python.distribute import estimator_training as distribute_coordinator_training
+from tensorflow.python.distribute import parameter_server_strategy_v2
+from tensorflow.python.util import compat_internal
+from tensorflow.python.util import function_utils
+from tensorflow.python.util.tf_export import estimator_export
+
+
+_USE_DEFAULT = object()
+_VALID_DEVICE_FN_ARGS = set(['op'])
+
+# A list of the property names in RunConfig that the user is allowed to change.
+_DEFAULT_REPLACEABLE_LIST = [
+    'model_dir', 'tf_random_seed', 'save_summary_steps',
+    'save_checkpoints_steps', 'save_checkpoints_secs', 'session_config',
+    'keep_checkpoint_max', 'keep_checkpoint_every_n_hours',
+    'log_step_count_steps', 'train_distribute', 'device_fn', 'protocol',
+    'eval_distribute', 'experimental_distribute',
+    'experimental_max_worker_delay_secs', 'session_creation_timeout_secs',
+    'checkpoint_save_graph_def'
+]
+
+_SAVE_CKPT_ERR = (
+    '`save_checkpoints_steps` and `save_checkpoints_secs` cannot be both set.')
+
+_TF_CONFIG_ENV = 'TF_CONFIG'
+_TASK_ENV_KEY = 'task'
+_TASK_TYPE_KEY = 'type'
+_TASK_ID_KEY = 'index'
+_CLUSTER_KEY = 'cluster'
+_SERVICE_KEY = 'service'
+_SESSION_MASTER_KEY = 'session_master'
+_EVAL_SESSION_MASTER_KEY = 'eval_session_master'
+_MODEL_DIR_KEY = 'model_dir'
+_LOCAL_MASTER = ''
+_GRPC_SCHEME = 'grpc://'
+
+
+def _get_session_master(cluster_spec, task_type, task_id, tf_config):
+  """Returns the appropriate address for TensorFlow master.
+
+  The order of precedence to determine the TF session master is as follows:
+  1. If `session_master` is set in the TF_CONFIG environment variable, it is
+     used.
+  2. If the cluster has only one node, returns empty string ''.
+  3. Returns the grpc address according to the task type and id in the cluster.
+     This is between-graph replication.
+
+  Note: task_type and task_id must be validated, typically using
+  `_validate_task_type_and_task_id`.
+
+  Args:
+    cluster_spec: A `ClusterSpec` instance.
+    task_type: String. Task type for current node.
+    task_id: Int. Task id for current node.
+    tf_config: Dict. Python dict for the TF_CONFIG environment variable.
+
+  Returns:
+    String. The address of the TensorFlow master to use for this node.
+
+  Raises:
+    RuntimeError: If `cluster_spec` is not set.
+
+  """
+  if _SESSION_MASTER_KEY in tf_config:
+    return tf_config[_SESSION_MASTER_KEY]
+
+  if not cluster_spec:
+    raise RuntimeError('Internal error: `_get_session_master` '
+                       'does not expect empty cluster_spec.')
+
+  jobs = cluster_spec.jobs
+
+  # If there is only one node in the cluster, do things locally by setting
+  # master to ''.  If a service or user sets TF_CONFIG with a single node, it's
+  # more performant to use a direct master rather than an RPC service.
+  if len(jobs) == 1 and len(cluster_spec.job_tasks(jobs[0])) == 1:
+    return _LOCAL_MASTER
+
+  # Lookup the master in cluster_spec using task_type and task_id,
+  # if possible.
+  addresses = cluster_spec.job_tasks(task_type)
+  return _GRPC_SCHEME + addresses[task_id]
+
+
+def _get_eval_session_master(task_type, tf_config):
+  """Returns the appropriate address for TensorFlow evaluation master."""
+  if task_type == TaskType.EVALUATOR:
+    return tf_config.get(_EVAL_SESSION_MASTER_KEY, _LOCAL_MASTER)
+
+  return _LOCAL_MASTER
+
+
+def _count_ps(cluster_spec):
+  """Counts the number of parameter servers in cluster_spec."""
+  if not cluster_spec:
+    raise RuntimeError(
+        'Internal error: `_count_ps` does not expect empty cluster_spec.')
+
+  return len(cluster_spec.as_dict().get(TaskType.PS, []))
+
+
+def _count_worker(cluster_spec, chief_task_type):
+  """Counts the number of workers (including chief) in cluster_spec."""
+  if not cluster_spec:
+    raise RuntimeError(
+        'Internal error: `_count_worker` does not expect empty cluster_spec.')
+
+  return (len(cluster_spec.as_dict().get(TaskType.WORKER, [])) +
+          len(cluster_spec.as_dict().get(chief_task_type, [])))
+
+
+def _validate_service(service):
+  """Validates the service key."""
+  if service is not None and not isinstance(service, dict):
+    raise TypeError(
+        'If "service" is set in TF_CONFIG, it must be a dict. Given %s' %
+        type(service))
+  return service
+
+
+def _validate_task_type_and_task_id(cluster_spec, task_env, chief_task_type):
+  """Validates the task type and index in `task_env` according to cluster."""
+  if chief_task_type not in cluster_spec.jobs:
+    raise ValueError(
+        'If "cluster" is set in TF_CONFIG, it must have one "%s" node.' %
+        chief_task_type)
+  if len(cluster_spec.job_tasks(chief_task_type)) > 1:
+    raise ValueError(
+        'The "cluster" in TF_CONFIG must have only one "%s" node.' %
+        chief_task_type)
+
+  task_type = task_env.get(_TASK_TYPE_KEY, None)
+  task_id = task_env.get(_TASK_ID_KEY, None)
+
+  if not task_type:
+    raise ValueError('If "cluster" is set in TF_CONFIG, task type must be set.')
+  if task_id is None:
+    raise ValueError(
+        'If "cluster" is set in TF_CONFIG, task index must be set.')
+
+  task_id = int(task_id)
+
+  # Check the task id bounds. An upper bound is not necessary as:
+  # - for evaluator, there is no upper bound.
+  # - for non-evaluator, the task id is upper bounded by the number of tasks
+  #   of that type in the cluster spec, which is checked later (when
+  #   retrieving the `master`).
+  if task_id < 0:
+    raise ValueError('Task index must be non-negative number.')
+
+  # Evaluator is not part of the training cluster.
+  if task_type == TaskType.EVALUATOR:
+    return task_type, task_id
+
+  if task_type not in cluster_spec.jobs:
+    raise ValueError(
+        '%s is not a valid task_type in the cluster_spec:\n'
+        '%s\n\n'
+        'Note that these values may be coming from the TF_CONFIG environment '
+        'variable.' % (task_type, cluster_spec))
+  addresses = cluster_spec.job_tasks(task_type)
+  if not 0 <= task_id < len(addresses):
+    raise ValueError(
+        '%d is not a valid task_id for task_type %s in the cluster_spec:\n'
+        '%s\n\n'
+        'Note that these values may be coming from the TF_CONFIG environment '
+        'variable.' % (task_id, task_type, cluster_spec))
+
+  return task_type, task_id
+
+
+def _get_global_id_in_cluster(cluster_spec, task_type, task_id,
+                              chief_task_type):
+  """Returns the global id in cluster."""
+  # Note: This is an implementation detail, which users should not rely on.
+  # The first id is 0, which is always for the `chief` node. All other nodes,
+  # except `ps`, are ordered alphabetically by task type and then by task id
+  # (ascending). `ps` nodes are ordered last.
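+  # For example (illustrative): in a cluster with one chief, two workers, and
+  # two ps tasks, the global ids are chief -> 0, workers -> 1 and 2, and
+  # ps -> 3 and 4.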
+
+  # Sort task names in cluster
+  task_type_ordered_list = [chief_task_type]
+  task_type_ordered_list.extend([
+      t for t in sorted(cluster_spec.jobs)
+      if t != chief_task_type and t != TaskType.PS
+  ])
+  if TaskType.PS in cluster_spec.jobs:
+    task_type_ordered_list.append(TaskType.PS)
+
+  next_global_id = 0
+  for t in task_type_ordered_list:
+    if t == task_type:
+      return next_global_id + task_id
+    next_global_id += len(cluster_spec.job_tasks(t))
+
+  # This should never happen.
+  raise RuntimeError('Internal Error: `task_type` ({}) is not in '
+                     'cluster_spec ({}).'.format(task_type, cluster_spec))
+
+
+def _validate_save_ckpt_with_replaced_keys(new_copy, replaced_keys):
+  """Validates the save ckpt properties."""
+  # Ensure one (and only one) of save_steps and save_secs is not None.
+  # Also, if the user sets one save-ckpt property, say steps, the other one
+  # (secs) should be set to None to improve usability.
+
+  save_steps = new_copy.save_checkpoints_steps
+  save_secs = new_copy.save_checkpoints_secs
+
+  if ('save_checkpoints_steps' in replaced_keys and
+      'save_checkpoints_secs' in replaced_keys):
+    # If user sets both properties explicitly, we need to error out if both
+    # are set or neither of them are set.
+    if save_steps is not None and save_secs is not None:
+      raise ValueError(_SAVE_CKPT_ERR)
+  elif 'save_checkpoints_steps' in replaced_keys and save_steps is not None:
+    new_copy._save_checkpoints_secs = None  # pylint: disable=protected-access
+  elif 'save_checkpoints_secs' in replaced_keys and save_secs is not None:
+    new_copy._save_checkpoints_steps = None  # pylint: disable=protected-access
+
+
+def _validate_properties(run_config):
+  """Validates the properties."""
+
+  def _validate(property_name, cond, message):
+    property_value = getattr(run_config, property_name)
+    if property_value is not None and not cond(property_value):
+      raise ValueError(message)
+
+  def _validate_delay(delay):
+    """Check that delay is an integer value.
+
+    Since this has to work for both Python 2 and Python 3, and PEP 237 unifies
+    long with int, we cannot just use a lambda function.
+    """
+    try:
+      return isinstance(delay, (int, long))
+    except NameError:
+      # Python 3 has no `long` (PEP 237 unified it with `int`).
+      return isinstance(delay, int)
+
+  _validate(
+      'model_dir', lambda dir: dir, message='model_dir should be non-empty')
+
+  _validate(
+      'save_summary_steps',
+      lambda steps: steps >= 0,
+      message='save_summary_steps should be >= 0')
+
+  _validate(
+      'save_checkpoints_steps',
+      lambda steps: steps >= 0,
+      message='save_checkpoints_steps should be >= 0')
+  _validate(
+      'save_checkpoints_secs',
+      lambda secs: secs >= 0,
+      message='save_checkpoints_secs should be >= 0')
+
+  _validate(
+      'session_config',
+      lambda sc: isinstance(sc, config_pb2.ConfigProto),
+      message='session_config must be instance of ConfigProto')
+
+  _validate(
+      'keep_checkpoint_max',
+      lambda keep_max: keep_max >= 0,
+      message='keep_checkpoint_max should be >= 0')
+  _validate(
+      'keep_checkpoint_every_n_hours',
+      lambda keep_hours: keep_hours > 0,
+      message='keep_checkpoint_every_n_hours should be > 0')
+  _validate(
+      'log_step_count_steps',
+      lambda num_steps: num_steps > 0,
+      message='log_step_count_steps should be > 0')
+
+  _validate(
+      'tf_random_seed',
+      lambda seed: isinstance(seed, six.integer_types),
+      message='tf_random_seed must be integer.')
+
+  _validate(
+      'experimental_max_worker_delay_secs',
+      _validate_delay,
+      message='experimental_max_worker_delay_secs must be an integer if'
+      ' set.')
+  _validate(
+      'session_creation_timeout_secs',
+      lambda timeout_secs: timeout_secs > 0,
+      message='session_creation_timeout_secs should be > 0')
+
+  _validate(
+      'device_fn',
+      lambda device_fn: six.callable(device_fn) and set(
+          function_utils.fn_args(device_fn)) == _VALID_DEVICE_FN_ARGS,
+      message='device_fn must be callable with exactly'
+      ' one argument "op".')
+
+  _validate(
+      'protocol',
+      lambda protocol: protocol in (None, 'grpc', 'grpc+verbs'),
+      message='protocol should be grpc or grpc+verbs')
+
+
+def get_default_session_config():
+  """Returns tf.ConfigProto instance."""
+
+  rewrite_opts = rewriter_config_pb2.RewriterConfig(
+      meta_optimizer_iterations=rewriter_config_pb2.RewriterConfig.ONE)
+  graph_opts = config_pb2.GraphOptions(rewrite_options=rewrite_opts)
+
+  return config_pb2.ConfigProto(
+      allow_soft_placement=True, graph_options=graph_opts)
+
+
+class TaskType(object):
+  MASTER = 'master'
+  PS = 'ps'
+  WORKER = 'worker'
+  CHIEF = 'chief'
+  EVALUATOR = 'evaluator'
+
+
+@estimator_export('estimator.RunConfig')
+class RunConfig(object):
+  """This class specifies the configurations for an `Estimator` run."""
+
+  def __init__(self,
+               model_dir=None,
+               tf_random_seed=None,
+               save_summary_steps=100,
+               save_checkpoints_steps=_USE_DEFAULT,
+               save_checkpoints_secs=_USE_DEFAULT,
+               session_config=None,
+               keep_checkpoint_max=5,
+               keep_checkpoint_every_n_hours=10000,
+               log_step_count_steps=100,
+               train_distribute=None,
+               device_fn=None,
+               protocol=None,
+               eval_distribute=None,
+               experimental_distribute=None,
+               experimental_max_worker_delay_secs=None,
+               session_creation_timeout_secs=7200,
+               checkpoint_save_graph_def=True):
+    """Constructs a RunConfig.
+
+    All distributed training related properties `cluster_spec`, `is_chief`,
+    `master` , `num_worker_replicas`, `num_ps_replicas`, `task_id`, and
+    `task_type` are set based on the `TF_CONFIG` environment variable, if the
+    pertinent information is present. The `TF_CONFIG` environment variable is a
+    JSON object with attributes: `cluster` and `task`.
+
+    `cluster` is a JSON serialized version of `ClusterSpec`'s Python dict from
+    `server_lib.py`, mapping task types (usually one of the `TaskType` enums) to
+    a list of task addresses.
+
+    `task` has two attributes: `type` and `index`, where `type` can be any of
+    the task types in `cluster`. When `TF_CONFIG` contains said information,
+    the following properties are set on this class:
+
+    * `cluster_spec` is parsed from `TF_CONFIG['cluster']`. Defaults to {}. If
+      present, must have one and only one node in the `chief` attribute of
+      `cluster_spec`.
+    * `task_type` is set to `TF_CONFIG['task']['type']`. Must be set if
+      `cluster_spec` is present; must be `worker` (the default value) if
+      `cluster_spec` is not set.
+    * `task_id` is set to `TF_CONFIG['task']['index']`. Must be set if
+      `cluster_spec` is present; must be 0 (the default value) if
+      `cluster_spec` is not set.
+    * `master` is determined by looking up `task_type` and `task_id` in the
+      `cluster_spec`. Defaults to ''.
+    * `num_ps_replicas` is set by counting the number of nodes listed
+      in the `ps` attribute of `cluster_spec`. Defaults to 0.
+    * `num_worker_replicas` is set by counting the number of nodes listed
+      in the `worker` and `chief` attributes of `cluster_spec`. Defaults to 1.
+    * `is_chief` is determined based on `task_type` and `cluster`.
+
+    There is a special node with `task_type` as `evaluator`, which is not part
+    of the (training) `cluster_spec`. It handles the distributed evaluation job.
+
+    Example of non-chief node:
+    ```
+      cluster = {'chief': ['host0:2222'],
+                 'ps': ['host1:2222', 'host2:2222'],
+                 'worker': ['host3:2222', 'host4:2222', 'host5:2222']}
+      os.environ['TF_CONFIG'] = json.dumps(
+          {'cluster': cluster,
+           'task': {'type': 'worker', 'index': 1}})
+      config = RunConfig()
+      assert config.master == 'host4:2222'
+      assert config.task_id == 1
+      assert config.num_ps_replicas == 2
+      assert config.num_worker_replicas == 4
+      assert config.cluster_spec == server_lib.ClusterSpec(cluster)
+      assert config.task_type == 'worker'
+      assert not config.is_chief
+    ```
+
+    Example of chief node:
+    ```
+      cluster = {'chief': ['host0:2222'],
+                 'ps': ['host1:2222', 'host2:2222'],
+                 'worker': ['host3:2222', 'host4:2222', 'host5:2222']}
+      os.environ['TF_CONFIG'] = json.dumps(
+          {'cluster': cluster,
+           'task': {'type': 'chief', 'index': 0}})
+      config = RunConfig()
+      assert config.master == 'host0:2222'
+      assert config.task_id == 0
+      assert config.num_ps_replicas == 2
+      assert config.num_worker_replicas == 4
+      assert config.cluster_spec == server_lib.ClusterSpec(cluster)
+      assert config.task_type == 'chief'
+      assert config.is_chief
+    ```
+
+    Example of evaluator node (evaluator is not part of training cluster):
+    ```
+      cluster = {'chief': ['host0:2222'],
+                 'ps': ['host1:2222', 'host2:2222'],
+                 'worker': ['host3:2222', 'host4:2222', 'host5:2222']}
+      os.environ['TF_CONFIG'] = json.dumps(
+          {'cluster': cluster,
+           'task': {'type': 'evaluator', 'index': 0}})
+      config = RunConfig()
+      assert config.master == ''
+      assert config.evaluator_master == ''
+      assert config.task_id == 0
+      assert config.num_ps_replicas == 0
+      assert config.num_worker_replicas == 0
+      assert config.cluster_spec == {}
+      assert config.task_type == 'evaluator'
+      assert not config.is_chief
+    ```
+
+    N.B.: If `save_checkpoints_steps` or `save_checkpoints_secs` is set,
+    `keep_checkpoint_max` might need to be adjusted accordingly, especially in
+    distributed training. For example, setting `save_checkpoints_secs` to 60
+    without adjusting `keep_checkpoint_max` (which defaults to 5) leads to a
+    situation in which a checkpoint is garbage collected after 5 minutes. In
+    distributed training, the evaluation job starts asynchronously and might
+    fail to load or find the checkpoint due to a race condition.
+
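+    For example, a sketch that keeps roughly an hour's worth of
+    minute-interval checkpoints:
+
+    ```
+    config = tf.estimator.RunConfig(
+        save_checkpoints_secs=60, keep_checkpoint_max=60)
+    ```
+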
+    Args:
+      model_dir: directory where model parameters, graph, etc. are saved. If a
+        `PathLike` object, the path will be resolved. If `None`, will use a
+        default value set by the Estimator.
+      tf_random_seed: Random seed for TensorFlow initializers. Setting this
+        value allows consistency between reruns.
+      save_summary_steps: Save summaries every this many steps.
+      save_checkpoints_steps: Save checkpoints every this many steps. Cannot
+        be specified with `save_checkpoints_secs`.
+      save_checkpoints_secs: Save checkpoints every this many seconds. Cannot
+        be specified with `save_checkpoints_steps`. Defaults to 600 seconds if
+        neither `save_checkpoints_steps` nor `save_checkpoints_secs` is set in
+        the constructor. If both `save_checkpoints_steps` and
+        `save_checkpoints_secs` are `None`, then checkpoints are disabled.
+      session_config: a ConfigProto used to set session parameters, or `None`.
+      keep_checkpoint_max: The maximum number of recent checkpoint files to
+        keep. As new files are created, older files are deleted. If `None` or 0,
+        all checkpoint files are kept. Defaults to 5 (that is, the 5 most recent
+        checkpoint files are kept). If a saver is passed to the estimator, this
+        argument will be ignored.
+      keep_checkpoint_every_n_hours: Number of hours between each checkpoint to
+        be saved. The default value of 10,000 hours effectively disables the
+        feature.
+      log_step_count_steps: The frequency, in number of global steps, that the
+        global step and the loss will be logged during training.  Also controls
+        the frequency that the global steps / s will be logged (and written to
+        summary) during training.
+      train_distribute: An optional instance of `tf.distribute.Strategy`. If
+        specified, then Estimator will distribute the user's model during
+        training, according to the policy specified by that strategy. Setting
+        `experimental_distribute.train_distribute` is preferred.
+      device_fn: A callable invoked for every `Operation` that takes the
+        `Operation` and returns the device string. If `None`, defaults to the
+        device function returned by `tf.train.replica_device_setter` with
+        round-robin strategy.
+      protocol: An optional argument which specifies the protocol used when
+        starting the server. `None` means default to grpc.
+      eval_distribute: An optional instance of `tf.distribute.Strategy`. If
+        specified, then Estimator will distribute the user's model during
+        evaluation, according to the policy specified by that strategy. Setting
+        `experimental_distribute.eval_distribute` is preferred.
+      experimental_distribute: An optional
+        `tf.contrib.distribute.DistributeConfig` object specifying
+        DistributionStrategy-related configuration. The `train_distribute` and
+        `eval_distribute` can be passed as parameters to `RunConfig` or set in
+        `experimental_distribute` but not both.
+      experimental_max_worker_delay_secs: An optional integer specifying the
+        maximum time a worker should wait before starting. By default, workers
+        are started at staggered times, with each worker being delayed by up to
+        60 seconds. This is intended to reduce the risk of divergence, which can
+        occur when many workers simultaneously update the weights of a randomly
+        initialized model. Users who warm-start their models and train them for
+        short durations (a few minutes or less) should consider reducing this
+        default to improve training times.
+      session_creation_timeout_secs: Max time workers should wait for a session
+        to become available (on initialization or when recovering a session)
+        with MonitoredTrainingSession. Defaults to 7200 seconds, but users may
+        want to set a lower value to detect problems with variable / session
+        (re)-initialization more quickly.
+      checkpoint_save_graph_def: Whether to save the GraphDef and MetaGraphDef
+        to `checkpoint_dir`. The GraphDef is saved after the session is created
+        as `graph.pbtxt`. MetaGraphDefs are saved out for every checkpoint as
+        `model.ckpt-*.meta`.
+
+    Raises:
+      ValueError: If both `save_checkpoints_steps` and `save_checkpoints_secs`
+      are set.
+    """
+    if (save_checkpoints_steps == _USE_DEFAULT and
+        save_checkpoints_secs == _USE_DEFAULT):
+      save_checkpoints_steps = None
+      save_checkpoints_secs = 600
+    elif save_checkpoints_secs == _USE_DEFAULT:
+      save_checkpoints_secs = None
+    elif save_checkpoints_steps == _USE_DEFAULT:
+      save_checkpoints_steps = None
+    elif (save_checkpoints_steps is not None and
+          save_checkpoints_secs is not None):
+      raise ValueError(_SAVE_CKPT_ERR)
+
+    self._verify_strategy_compatibility(train_distribute, eval_distribute)
+
+    tf_config = json.loads(os.environ.get(_TF_CONFIG_ENV, '{}'))
+    if tf_config:
+      tf.compat.v1.logging.info('TF_CONFIG environment variable: %s', tf_config)
+
+    model_dir = _get_model_dir(tf_config,
+                               compat_internal.path_to_str(model_dir))
+
+    RunConfig._replace(
+        self,
+        allowed_properties_list=_DEFAULT_REPLACEABLE_LIST,
+        model_dir=model_dir,
+        tf_random_seed=tf_random_seed,
+        save_summary_steps=save_summary_steps,
+        save_checkpoints_steps=save_checkpoints_steps,
+        save_checkpoints_secs=save_checkpoints_secs,
+        session_config=session_config,
+        keep_checkpoint_max=keep_checkpoint_max,
+        keep_checkpoint_every_n_hours=keep_checkpoint_every_n_hours,
+        log_step_count_steps=log_step_count_steps,
+        train_distribute=train_distribute,
+        device_fn=device_fn,
+        protocol=protocol,
+        eval_distribute=eval_distribute,
+        experimental_distribute=experimental_distribute,
+        experimental_max_worker_delay_secs=experimental_max_worker_delay_secs,
+        session_creation_timeout_secs=session_creation_timeout_secs,
+        checkpoint_save_graph_def=checkpoint_save_graph_def)
+
+    # TODO(frankchn,priyag): Eventually use distributed coordinator for TPUs.
+    if ((train_distribute and
+         not train_distribute.__class__.__name__.startswith('TPUStrategy')) or
+        (eval_distribute and
+         not eval_distribute.__class__.__name__.startswith('TPUStrategy')) or
+        experimental_distribute):
+      tf.compat.v1.logging.info(
+          'Initializing RunConfig with distribution strategies.')
+      distribute_coordinator_training.init_run_config(self, tf_config)
+    else:
+      self._init_distributed_setting_from_environment_var(tf_config)
+      self._maybe_overwrite_session_config_for_distributed_training()
+
+  def _verify_strategy_compatibility(self, train_distribute, eval_distribute):
+    if ((train_distribute is not None and train_distribute.__class__ ==
+         parameter_server_strategy_v2.ParameterServerStrategyV2) or
+        (eval_distribute is not None and eval_distribute.__class__ ==
+         parameter_server_strategy_v2.ParameterServerStrategyV2)):
+      raise ValueError('Please use `tf.compat.v1.distribute.experimental.Param'
+                       'eterServerStrategy` for parameter server strategy with '
+                       'estimator.')
+
+  def _maybe_overwrite_session_config_for_distributed_training(self):
+    """Overwrites the session_config for distributed training.
+
+    The default overwrite is optimized for between-graph training. Subclass
+    should override this method if necessary.
+    """
+    # Get session_config only for between-graph distributed mode (cluster_spec
+    # is present).
+    if not self._session_config and self._cluster_spec:
+      RunConfig._replace(
+          self,
+          allowed_properties_list=_DEFAULT_REPLACEABLE_LIST,
+          session_config=self._get_default_session_config_distributed())
+
+  def _get_default_session_config_distributed(self):
+    """Returns None or tf.ConfigProto instance with default device_filters set.
+
+    Device filters are set such that chief/master and workers communicate only
+    with ps. session_config is None for evaluators or any other TaskType.
+    """
+
+    rewrite_opts = rewriter_config_pb2.RewriterConfig(
+        meta_optimizer_iterations=rewriter_config_pb2.RewriterConfig.ONE)
+    graph_opts = config_pb2.GraphOptions(rewrite_options=rewrite_opts)
+
+    device_filters = None
+    if self._task_type == TaskType.MASTER:
+      device_filters = ['/job:ps', '/job:master']
+    elif self._task_type == TaskType.CHIEF:
+      device_filters = ['/job:ps', '/job:chief']
+    elif self._task_type == TaskType.WORKER:
+      device_filters = ['/job:ps', '/job:worker/task:%d' % self._task_id]
+    elif self._task_type == TaskType.PS:
+      device_filters = ['/job:ps', '/job:worker', '/job:chief', '/job:master']
+    else:
+      # If the task_type is `EVALUATOR` or something other than the ones in
+      # TaskType, then don't set any device filters.
+      return None
+
+    return config_pb2.ConfigProto(
+        allow_soft_placement=True,
+        graph_options=graph_opts,
+        device_filters=device_filters)
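+
+  # For illustration (hypothetical task): a worker with task_id 3 receives a
+  # ConfigProto whose device_filters are ['/job:ps', '/job:worker/task:3'],
+  # so it exchanges state only with the parameter servers and itself.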
+
+  def _init_distributed_setting_from_environment_var(self, tf_config):
+    """Initialize distributed properties based on `tf_config`."""
+
+    self._service = _validate_service(tf_config.get(_SERVICE_KEY))
+    self._cluster_spec = tf.train.ClusterSpec(tf_config.get(_CLUSTER_KEY, {}))
+    task_env = tf_config.get(_TASK_ENV_KEY, {})
+
+    if self._cluster_spec and TaskType.MASTER in self._cluster_spec.jobs:
+      return self._init_distributed_setting_from_environment_var_with_master(
+          tf_config)
+
+    if self._cluster_spec:
+      # Distributed mode.
+      self._task_type, self._task_id = _validate_task_type_and_task_id(
+          self._cluster_spec, task_env, TaskType.CHIEF)
+
+      self._evaluation_master = _get_eval_session_master(
+          self._task_type, tf_config)
+
+      if self._task_type != TaskType.EVALUATOR:
+        self._master = _get_session_master(self._cluster_spec, self._task_type,
+                                           self._task_id, tf_config)
+        self._num_ps_replicas = _count_ps(self._cluster_spec)
+        self._num_worker_replicas = _count_worker(
+            self._cluster_spec, chief_task_type=TaskType.CHIEF)
+        self._global_id_in_cluster = _get_global_id_in_cluster(
+            self._cluster_spec,
+            self._task_type,
+            self._task_id,
+            chief_task_type=TaskType.CHIEF)
+      else:
+        # Evaluator is not part of the training cluster.
+        self._cluster_spec = tf.train.ClusterSpec({})
+        self._master = _LOCAL_MASTER
+        self._num_ps_replicas = 0
+        self._num_worker_replicas = 0
+        self._global_id_in_cluster = None  # undefined
+
+      self._is_chief = self._task_type == TaskType.CHIEF
+    else:
+      # Local mode.
+      self._task_type = task_env.get(_TASK_TYPE_KEY, TaskType.WORKER)
+      self._task_id = int(task_env.get(_TASK_ID_KEY, 0))
+      self._global_id_in_cluster = 0
+
+      if self._task_type != TaskType.WORKER:
+        raise ValueError(
+            'If "cluster" is not set in TF_CONFIG, task type must be WORKER.')
+      if self._task_id != 0:
+        raise ValueError(
+            'If "cluster" is not set in TF_CONFIG, task index must be 0.')
+
+      self._master = tf_config.get(_SESSION_MASTER_KEY, _LOCAL_MASTER)
+      self._evaluation_master = tf_config.get(_EVAL_SESSION_MASTER_KEY,
+                                              _LOCAL_MASTER)
+      self._is_chief = True
+      self._num_ps_replicas = 0
+      self._num_worker_replicas = 1
+
+  def _init_distributed_setting_from_environment_var_with_master(
+      self, tf_config):
+    """Initialize distributed properties for legacy cluster with `master`."""
+    # There is no technical reason why a user cannot have both chief and
+    # master in the same cluster, but it is very confusing (which one is
+    # really the chief?), so this case is blocked.
+    if TaskType.CHIEF in self._cluster_spec.jobs:
+      raise ValueError('If `master` node exists in `cluster`, job '
+                       '`chief` is not supported.')
+
+    task_env = tf_config.get(_TASK_ENV_KEY, {})
+
+    self._task_type, self._task_id = _validate_task_type_and_task_id(
+        self._cluster_spec, task_env, TaskType.MASTER)
+
+    if self._task_type == TaskType.EVALUATOR:
+      raise ValueError('If `master` node exists in `cluster`, task_type '
+                       '`evaluator` is not supported.')
+
+    self._global_id_in_cluster = _get_global_id_in_cluster(
+        self._cluster_spec,
+        self._task_type,
+        self._task_id,
+        chief_task_type=TaskType.MASTER)
+
+    self._master = _get_session_master(self._cluster_spec, self._task_type,
+                                       self._task_id, tf_config)
+    self._evaluation_master = _get_eval_session_master(self._task_type,
+                                                       tf_config)
+    self._num_ps_replicas = _count_ps(self._cluster_spec)
+    self._num_worker_replicas = _count_worker(
+        self._cluster_spec, chief_task_type=TaskType.MASTER)
+
+    self._is_chief = self._task_type == TaskType.MASTER
+
+  @property
+  def cluster_spec(self):
+    return self._cluster_spec
+
+  @property
+  def device_fn(self):
+    """Returns the device_fn.
+
+    If device_fn is not `None`, it overrides the default
+    device function used in `Estimator`.
+    Otherwise the default one is used.
+    """
+    return self._device_fn
+
+  @property
+  def evaluation_master(self):
+    return self._evaluation_master
+
+  @property
+  def is_chief(self):
+    return self._is_chief
+
+  @property
+  def master(self):
+    return self._master
+
+  @property
+  def num_ps_replicas(self):
+    return self._num_ps_replicas
+
+  @property
+  def num_worker_replicas(self):
+    return self._num_worker_replicas
+
+  @property
+  def task_id(self):
+    return self._task_id
+
+  @property
+  def global_id_in_cluster(self):
+    """The global id in the training cluster.
+
+    All global ids in the training cluster are assigned from an increasing
+    sequence of consecutive integers. The first id is 0.
+
+    Note: Task id (the property field `task_id`) is tracking the index of the
+    node among all nodes with the SAME task type. For example, given the cluster
+    definition as follows:
+
+    ```
+      cluster = {'chief': ['host0:2222'],
+                 'ps': ['host1:2222', 'host2:2222'],
+                 'worker': ['host3:2222', 'host4:2222', 'host5:2222']}
+    ```
+
+    Nodes with task type `worker` can have id 0, 1, or 2. Nodes with task type
+    `ps` can have id 0 or 1. So `task_id` is not unique, but the pair
+    (`task_type`, `task_id`) uniquely determines a node in the cluster.
+
+    Global id, i.e., this field, is tracking the index of the node among ALL
+    nodes in the cluster. It is uniquely assigned.  For example, for the cluster
+    spec given above, the global ids are assigned as:
+    ```
+      task_type  | task_id  |  global_id
+      --------------------------------
+      chief      | 0        |  0
+      worker     | 0        |  1
+      worker     | 1        |  2
+      worker     | 2        |  3
+      ps         | 0        |  4
+      ps         | 1        |  5
+    ```
+
+    Returns:
+      An integer id.
+    """
+    return self._global_id_in_cluster
+
+  @property
+  def experimental_max_worker_delay_secs(self):
+    return self._experimental_max_worker_delay_secs
+
+  @property
+  def task_type(self):
+    return self._task_type
+
+  @property
+  def tf_random_seed(self):
+    return self._tf_random_seed
+
+  @property
+  def save_summary_steps(self):
+    return self._save_summary_steps
+
+  @property
+  def save_checkpoints_secs(self):
+    return self._save_checkpoints_secs
+
+  @property
+  def session_config(self):
+    return self._session_config
+
+  @property
+  def save_checkpoints_steps(self):
+    return self._save_checkpoints_steps
+
+  @property
+  def checkpoint_save_graph_def(self):
+    return self._checkpoint_save_graph_def
+
+  @property
+  def keep_checkpoint_max(self):
+    return self._keep_checkpoint_max
+
+  @property
+  def session_creation_timeout_secs(self):
+    return self._session_creation_timeout_secs
+
+  @property
+  def keep_checkpoint_every_n_hours(self):
+    return self._keep_checkpoint_every_n_hours
+
+  @property
+  def log_step_count_steps(self):
+    return self._log_step_count_steps
+
+  @property
+  def model_dir(self):
+    return self._model_dir
+
+  @property
+  def service(self):
+    """Returns the platform defined (in TF_CONFIG) service dict."""
+    return self._service
+
+  @property
+  def train_distribute(self):
+    """Optional `tf.distribute.Strategy` for training."""
+    return self._train_distribute
+
+  @property
+  def eval_distribute(self):
+    """Optional `tf.distribute.Strategy` for evaluation."""
+    return self._eval_distribute
+
+  @property
+  def protocol(self):
+    """Returns the optional protocol value."""
+    return self._protocol
+
+  def replace(self, **kwargs):
+    """Returns a new instance of `RunConfig` replacing specified properties.
+
+    Only the properties in the following list are allowed to be replaced:
+
+      - `model_dir`,
+      - `tf_random_seed`,
+      - `save_summary_steps`,
+      - `save_checkpoints_steps`,
+      - `save_checkpoints_secs`,
+      - `session_config`,
+      - `keep_checkpoint_max`,
+      - `keep_checkpoint_every_n_hours`,
+      - `log_step_count_steps`,
+      - `train_distribute`,
+      - `device_fn`,
+      - `protocol`,
+      - `eval_distribute`,
+      - `experimental_distribute`,
+      - `experimental_max_worker_delay_secs`.
+
+    In addition, either `save_checkpoints_steps` or `save_checkpoints_secs`
+    can be set (but not both).
+
+    Args:
+      **kwargs: keyword named properties with new values.
+
+    Raises:
+      ValueError: If any property name in `kwargs` does not exist or is not
+        allowed to be replaced, or both `save_checkpoints_steps` and
+        `save_checkpoints_secs` are set.
+
+    Returns:
+      a new instance of `RunConfig`.
+    """
+    return RunConfig._replace(
+        copy.deepcopy(self),
+        allowed_properties_list=_DEFAULT_REPLACEABLE_LIST,
+        **kwargs)
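+
+  # A minimal usage sketch for `replace` (values are hypothetical; kept as a
+  # comment so the module stays importable):
+  #
+  #   config = RunConfig(model_dir='/tmp/model_dir')
+  #   config = config.replace(save_summary_steps=500, keep_checkpoint_max=10)
+  #   # Setting both checkpoint cadence knobs in one call raises ValueError:
+  #   # config.replace(save_checkpoints_steps=100, save_checkpoints_secs=600)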
+
+  @staticmethod
+  def _replace(config, allowed_properties_list=None, **kwargs):
+    """See `replace`.
+
+    N.B.: This implementation assumes that for a key named "foo", the
+    underlying property the RunConfig holds is "_foo" (one leading underscore).
+
+    Args:
+      config: The RunConfig to replace the values of.
+      allowed_properties_list: The property name list allowed to be replaced.
+      **kwargs: keyword named properties with new values.
+
+    Raises:
+      ValueError: If any property name in `kwargs` does not exist or is not
+        allowed to be replaced, or both `save_checkpoints_steps` and
+        `save_checkpoints_secs` are set.
+
+    Returns:
+      a new instance of `RunConfig`.
+    """
+
+    allowed_properties_list = allowed_properties_list or []
+
+    for key, new_value in six.iteritems(kwargs):
+      if key in allowed_properties_list:
+        setattr(config, '_' + key, new_value)
+        continue
+
+      raise ValueError(
+          'Replacing {} is not supported. Allowed properties are {}.'.format(
+              key, allowed_properties_list))
+
+    _validate_save_ckpt_with_replaced_keys(config, kwargs.keys())
+    _validate_properties(config)
+    return config
+
+
+def _get_model_dir(tf_config, model_dir):
+  """Returns `model_dir` based on user-provided `tf_config` or `model_dir`."""
+  # pylint: disable=g-explicit-bool-comparison
+
+  # Empty string is treated as False in Python condition check, which triggers
+  # some confusing error messages. For example, 'a or b' returns None if a is ''
+  # and b is None. `None` is allowed for model_dir but '' is not allowed. Here,
+  # explicitly check empty string to provide clear error message.
+  if model_dir == '':
+    raise ValueError('model_dir should be non-empty.')
+
+  model_dir_in_tf_config = tf_config.get('model_dir')
+  if model_dir_in_tf_config == '':
+    raise ValueError('model_dir in TF_CONFIG should be non-empty.')
+
+  if model_dir_in_tf_config:
+    if model_dir and model_dir_in_tf_config != model_dir:
+      raise ValueError(
+          '`model_dir` provided in the RunConfig constructor, if set, '
+          'must have the same value as the model_dir in TF_CONFIG. '
+          'model_dir: {}\nTF_CONFIG["model_dir"]: {}.\n'.format(
+              model_dir, model_dir_in_tf_config))
+
+    tf.compat.v1.logging.info('Using model_dir in TF_CONFIG: %s',
+                              model_dir_in_tf_config)
+
+  return model_dir or model_dir_in_tf_config
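+
+
+# A minimal sketch of how `_get_model_dir` resolves its inputs (paths are
+# hypothetical):
+#
+#   _get_model_dir({}, '/tmp/m')                       # -> '/tmp/m'
+#   _get_model_dir({'model_dir': '/tmp/m'}, None)      # -> '/tmp/m'
+#   _get_model_dir({'model_dir': '/tmp/a'}, '/tmp/b')  # -> ValueError
+#   _get_model_dir({}, '')                             # -> ValueError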
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/analytics.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/analytics.py
new file mode 100644
index 00000000..ac78b1ed
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/analytics.py
@@ -0,0 +1,37 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Analytics helpers library."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+
+def track_usage(tool_id, tags):
+  """No usage tracking for external library.
+
+  Args:
+    tool_id: A string identifier for tool to be tracked.
+    tags: list of string tags that will be added to the tracking.
+  """
+  del tool_id, tags  # Unused externally.
+
+
+def track_numerical_issues(exc_info):
+  """No tracking for external library.
+
+  Args:
+    exc_info: Output from `sys.exc_info` (type, value, traceback)
+  """
+  del exc_info
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/checkpoint_converter.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/checkpoint_converter.py
new file mode 100644
index 00000000..933435e8
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tools/checkpoint_converter.py
@@ -0,0 +1,368 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+r"""Checkpoint converter for Canned Estimators in TF 1.x.
+
+This checkpoint converter tool is mainly for Canned Estimators, including DNN,
+Linear, and DNNLinearCombined estimators. The allowed optimizers to be
+converted include Adam, Adagrad, Ftrl, RMSProp, and SGD.
+
+Note that this converter is not suitable for the case where 'dnn_optimizer'
+and 'linear_optimizer' in the DNNLinearCombined model are the same.
+
+If your current canned estimators and checkpoints are from TF 1.x, then after
+you migrate the canned estimator to v2 with `tf.keras.optimizers.*`, the
+converted checkpoint allows you to restore and retrain the model in TF 2.0.
+
+Usage:
+  python checkpoint_converter.py '/path/to/checkpoint' '/path/to/graph.pbtxt' \
+      '/path/to/new_checkpoint'
+
+For example, if there is a V1 checkpoint to be converted and the files include:
+  /tmp/my_checkpoint/model.ckpt-100.data-00000-of-00001
+  /tmp/my_checkpoint/model.ckpt-100.index
+  /tmp/my_checkpoint/model.ckpt-100.meta
+  /tmp/my_checkpoint/graph.pbtxt
+
+use the following command:
+  mkdir /tmp/my_converted_checkpoint &&
+  python checkpoint_converter.py \
+      /tmp/my_checkpoint/model.ckpt-100 /tmp/my_checkpoint/graph.pbtxt \
+      /tmp/my_converted_checkpoint/model.ckpt-100
+
+This will generate three converted checkpoint files corresponding to the three
+old checkpoint files in the new directory:
+  /tmp/my_converted_checkpoint/model.ckpt-100.data-00000-of-00001
+  /tmp/my_converted_checkpoint/model.ckpt-100.index
+  /tmp/my_converted_checkpoint/model.ckpt-100.meta
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import argparse
+import sys
+import tensorflow as tf
+from google.protobuf import text_format
+from tensorflow.core.framework import graph_pb2
+from tensorflow.python.keras.optimizer_v2 import adagrad
+from tensorflow.python.keras.optimizer_v2 import adam
+from tensorflow.python.keras.optimizer_v2 import ftrl
+from tensorflow.python.keras.optimizer_v2 import gradient_descent
+from tensorflow.python.keras.optimizer_v2 import rmsprop
+
+# Optimizer name mapping from v1 to v2.
+OPT_NAME_V1_TO_V2 = {
+    'Adagrad': 'Adagrad',
+    'RMSProp': 'RMSprop',
+    'Ftrl': 'Ftrl',
+    'Adam': 'Adam',
+    'SGD': 'SGD',
+}
+
+# Hyper-parameters of the optimizer in the checkpoint.
+HP_IN_CKPT = {
+    'Adam': {
+        'beta1_power': 'training/Adam/beta_1',
+        'beta2_power': 'training/Adam/beta_2',
+    },
+}
+
+# Optimizer variable name mapping from v1 to v2.
+OPT_VAR_NAME_V1_TO_V2 = {
+    'Adam': {
+        'Adam': 'm',
+        'Adam_1': 'v',
+    },
+    'Ftrl': {
+        'Ftrl': 'accumulator',
+        'Ftrl_1': 'linear',
+    },
+    'RMSProp': {
+        'RMSProp': 'rms',
+        'RMSProp_1': None,
+    },
+    'Adagrad': {
+        'Adagrad': 'accumulator',
+    },
+}
+
+# Hyper-parameters of the optimizer in the graph.
+HP_IN_GRAPH = {
+    'Adam': ['decay', 'learning_rate'],
+    'Ftrl': [
+        'decay', 'l1_regularization_strength', 'l2_regularization_strength',
+        'beta', 'learning_rate', 'learning_rate_power'
+    ],
+    'RMSProp': ['decay', 'learning_rate', 'momentum', 'rho'],
+    'Adagrad': ['decay', 'learning_rate'],
+    'SGD': ['decay', 'learning_rate', 'momentum'],
+}
+
+# Optimizer v2 instances.
+OPT_V2_INSTANCE = {
+    'Adagrad': adagrad.Adagrad(),
+    'Adam': adam.Adam(),
+    'Ftrl': ftrl.Ftrl(),
+    'RMSProp': rmsprop.RMSprop(),
+    'SGD': gradient_descent.SGD(),
+}
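+
+# An illustrative example of the renaming driven by the tables above (the
+# variable name is hypothetical): a v1 DNN Adam slot variable such as
+#   dnn/hiddenlayer_0/kernel/t_0/Adam
+# becomes the v2 variable
+#   training/Adam/dnn/hiddenlayer_0/kernel/m
+# via OPT_NAME_V1_TO_V2 ('Adam' -> 'Adam') and OPT_VAR_NAME_V1_TO_V2
+# ('Adam' -> 'm').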
+
+
+def _add_new_variable(initial_value, var_name_v2, var_name_v1, var_map,
+                      var_names_map):
+  """Creates a new variable and adds it to the variable maps."""
+  var = tf.Variable(initial_value, name=var_name_v2)
+  var_map[var_name_v2] = var
+  var_names_map[var_name_v2] = var_name_v1
+
+
+def _add_opt_variable(opt_name_v2, var_name_v1, idx, suffix_v2, reader, var_map,
+                      var_names_map):
+  """Adds a new optimizer v2 variable."""
+  var_name_v2 = 'training/' + opt_name_v2 + '/' + var_name_v1[:idx] + suffix_v2
+  tensor = reader.get_tensor(var_name_v1)
+  _add_new_variable(tensor, var_name_v2, var_name_v1, var_map, var_names_map)
+
+
+def _convert_variables_in_ckpt(opt_name_v1, reader, variable_names, var_map,
+                               var_names_map, est_type):
+  """Converts all variables in checkpoint from v1 to v2."""
+  global_step = None
+  hp_ckpt = None
+  # Global step is needed for Adam for hyper parameter conversion.
+  if opt_name_v1 == 'Adam':
+    global_step = reader.get_tensor('global_step')
+  if opt_name_v1 in HP_IN_CKPT:
+    hp_ckpt = HP_IN_CKPT[opt_name_v1]
+  opt_name_v2 = OPT_NAME_V1_TO_V2[opt_name_v1]
+
+  # For variables with an equivalent mapping in the checkpoint, there are
+  # three types:
+  # 1) Hyper parameters. This is mainly for the Adam optimizer.
+  # 2) Optimizer variables.
+  # 3) Model variables.
+  for var_name in variable_names:
+    # If a hyper parameter variable is in the checkpoint.
+    if hp_ckpt and any(hp_name in var_name for hp_name in hp_ckpt):
+      for hp_name in hp_ckpt:
+        if hp_name in var_name:
+          var_name_v2 = hp_ckpt[hp_name]
+          tensor = reader.get_tensor(var_name)
+          # For Adam optimizer, in the old checkpoint, the optimizer variables
+          # are beta1_power and beta2_power. The corresponding variables in the
+          # new checkpoint are beta_1 and beta_2, and
+          # beta_1 = pow(beta1_power, 1/global_step)
+          # beta_2 = pow(beta2_power, 1/global_step)
+          tensor = tf.math.pow(tensor, 1.0 / global_step)
+          _add_new_variable(tensor, var_name_v2, var_name, var_map,
+                            var_names_map)
+          break
+    # If it's an optimizer variable.
+    elif opt_name_v1 in var_name:
+      suffix_mapping = OPT_VAR_NAME_V1_TO_V2[opt_name_v1]
+      suffix_v1 = var_name.rsplit('/')[-1]
+      suffix_v2 = suffix_mapping[suffix_v1]
+      if suffix_v2:
+        # For DNN model.
+        if est_type == 'dnn':
+          # The optimizer variable of DNN model in TF 1.x has 't_0' in its
+          # name (b/131719899). This is amended in TF 2.0.
+          idx = var_name.rfind('t_0')
+          _add_opt_variable(opt_name_v2, var_name, idx, suffix_v2, reader,
+                            var_map, var_names_map)
+        # for Linear model.
+        elif est_type == 'linear':
+          # The optimizer variable of Linear model in TF 1.x has 'part_0' in its
+          # name (b/131719899). This is amended in TF 2.0.
+          idx = var_name.rfind('part_0')
+          _add_opt_variable(opt_name_v2, var_name, idx, suffix_v2, reader,
+                            var_map, var_names_map)
+        # for DNNLinearCombined model.
+        else:
+          idx = var_name.rfind(suffix_v1)
+          _add_opt_variable(opt_name_v2, var_name, idx, suffix_v2, reader,
+                            var_map, var_names_map)
+    # If it's a model variable which is already backward compatible.
+    else:
+      tensor = reader.get_tensor(var_name)
+      _add_new_variable(tensor, var_name, var_name, var_map, var_names_map)
+
+
+def _convert_hyper_params_in_graph(graph_from_path, opt_name_v1, var_map,
+                                   var_names_map):
+  """Generates hyper parameters for optimizer v2 from graph.pbtxt."""
+  with tf.io.gfile.GFile(graph_from_path) as f:
+    graph_def = text_format.Parse(f.read(), graph_pb2.GraphDef())
+
+  # In Keras optimizers, the hyper parameters are also stored in the
+  # checkpoint, while the v1 checkpoint doesn't contain any hyper parameters.
+  # For the hyper parameter variables, there are two cases:
+  # 1) The hyper parameter exists in the graph.
+  #    If so, its value needs to be extracted from the graph node.
+  # 2) The hyper parameter doesn't exist in the graph.
+  #    Its value is set to the default value from the optimizer v2 config.
+  nodes_full = HP_IN_GRAPH[opt_name_v1]
+  nodes_in_graph = []
+  opt_name_v2 = OPT_NAME_V1_TO_V2[opt_name_v1]
+  tf.compat.v1.logging.info('For hyper parameter variables that are in Graph:')
+  for node in graph_def.node:
+    node_name = node.name.rsplit('/')[-1]
+    # For case 1), if the hyper parameter of the keras optimizer can be found
+    # in the graph, the graph node value is extracted as the hyper parameter
+    # variable value, and added to the new variable list.
+    if opt_name_v1 + '/' + node_name in nodes_full:
+      hp_value = node.attr.get('value').tensor.float_val[0]
+      hp_name_v2 = 'training/' + opt_name_v2 + '/' + node_name
+      tf.compat.v1.logging.info(
+          'Hyper parameter {} with value {} found in Graph.'.format(
+              hp_name_v2, hp_value))
+      _add_new_variable(hp_value, hp_name_v2, node_name, var_map, var_names_map)
+      # Adds this node to nodes_in_graph
+      nodes_in_graph.append(node_name)
+
+  # For case 2), if the hyper parameter is not in graph, we need to add it
+  # manually. The tensor value is its default value from optimizer v2 config.
+  nodes_not_in_graph = sorted(list(set(nodes_full) - set(nodes_in_graph)))
+  opt_v2_config = OPT_V2_INSTANCE[opt_name_v1].get_config()
+  tf.compat.v1.logging.info(
+      'For hyper parameter variables that are NOT in Graph:')
+  for node_name in nodes_not_in_graph:
+    hp_name_v2 = 'training/' + opt_name_v2 + '/' + node_name
+    tf.compat.v1.logging.info(
+        'Hyper parameter {} with default value {} is added.'.format(
+            hp_name_v2, opt_v2_config[node_name]))
+    _add_new_variable(opt_v2_config[node_name], hp_name_v2, node_name, var_map,
+                      var_names_map)
+
+
+def convert_checkpoint(estimator_type, source_checkpoint, source_graph,
+                       target_checkpoint):
+  """Converts checkpoint from TF 1.x to TF 2.0 for CannedEstimator.
+
+  Args:
+    estimator_type: The type of estimator to be converted. So far, the allowed
+      args include 'dnn', 'linear', and 'combined'.
+    source_checkpoint: Path to the source checkpoint file to be read in.
+    source_graph: Path to the source graph file to be read in.
+    target_checkpoint: Path to the target checkpoint to be written out.
+  """
+  with tf.Graph().as_default():
+    # Get v1 optimizer names and their corresponding variable names.
+    reader = tf.compat.v1.train.NewCheckpointReader(source_checkpoint)
+    variable_names = sorted(reader.get_variable_to_shape_map())
+    opt_names_v1 = {}
+    for var_name in variable_names:
+      for opt_name in OPT_NAME_V1_TO_V2:
+        if opt_name in var_name:
+          opt_names_v1[opt_name] = var_name
+
+    # SGD doesn't appear in the optimizer variables, so we need to add it
+    # manually if no optimizer is found in the checkpoint for a DNN or Linear
+    # model.
+    if not opt_names_v1:
+      if estimator_type == 'dnn' or estimator_type == 'linear':
+        opt_names_v1['SGD'] = ''
+      # As the converter does not handle the case where dnn_optimizer and
+      # linear_optimizer in the DNNLinearCombined model are the same, an error
+      # is raised if two SGD optimizers are used in the DNNLinearCombined
+      # model.
+      elif estimator_type == 'combined':
+        raise ValueError('Two `SGD` optimizers are used in DNNLinearCombined '
+                         'model, and this is not handled by the checkpoint '
+                         'converter.')
+
+    # A dict mapping from v2 variable name to the v2 variable.
+    var_map = {}
+    # A dict mapping from v2 variable name to v1 variable name.
+    var_names_map = {}
+
+    # Determine the names of dnn_optimizer and linear_optimizer in
+    # DNNLinearCombined model.
+    if estimator_type == 'combined':
+      linear_opt_v1 = None
+      if len(opt_names_v1) == 1:  # When one of the optimizers is 'SGD'.
+        key = list(opt_names_v1.keys())[0]
+        # Case 1: linear_optimizer is non-SGD, and dnn_optimizer is SGD.
+        if opt_names_v1[key].startswith('linear/linear_model/'):
+          linear_opt_v1 = key
+        # Case 2: linear_optimizer is SGD, and dnn_optimizer is non-SGD.
+        if not linear_opt_v1:
+          linear_opt_v1 = 'SGD'
+        opt_names_v1['SGD'] = ''
+      else:  # two non-SGD optimizers
+        for key in opt_names_v1:
+          if opt_names_v1[key].startswith('linear/linear_model/'):
+            linear_opt_v1 = key
+      # Add the 'iter' hyper parameter to the new checkpoint for
+      # linear_optimizer. Note dnn_optimizer uses global_step.
+      tensor = reader.get_tensor('global_step')
+      var_name_v2 = 'training/' + OPT_NAME_V1_TO_V2[linear_opt_v1] + '/iter'
+      var_name_v1 = 'global_step'
+      _add_new_variable(tensor, var_name_v2, var_name_v1, var_map,
+                        var_names_map)
+
+    for opt_name_v1 in opt_names_v1:
+      # Convert all existing variables from checkpoint.
+      _convert_variables_in_ckpt(opt_name_v1, reader, variable_names, var_map,
+                                 var_names_map, estimator_type)
+      # Convert hyper parameters for optimizer v2 from the graph.
+      _convert_hyper_params_in_graph(source_graph, opt_name_v1, var_map,
+                                     var_names_map)
+
+    # Log the variable mapping from opt v1 to v2.
+    tf.compat.v1.logging.info(
+        '<----- Variable names converted (v1 --> v2): ----->')
+    for name_v2 in var_names_map:
+      tf.compat.v1.logging.info('%s --> %s' % (var_names_map[name_v2], name_v2))
+
+    # Save to checkpoint v2.
+    saver = tf.compat.v1.train.Saver(var_list=var_map)
+    with tf.compat.v1.Session() as sess:
+      sess.run(tf.compat.v1.initializers.global_variables())
+      tf.compat.v1.logging.info('Writing checkpoint_to_path %s' %
+                                target_checkpoint)
+      saver.save(sess, target_checkpoint)
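+
+
+# A minimal programmatic usage sketch (paths are hypothetical; the module
+# docstring shows the equivalent command-line invocation):
+#
+#   convert_checkpoint(
+#       estimator_type='dnn',
+#       source_checkpoint='/tmp/my_checkpoint/model.ckpt-100',
+#       source_graph='/tmp/my_checkpoint/graph.pbtxt',
+#       target_checkpoint='/tmp/my_converted_checkpoint/model.ckpt-100')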
+
+
+def main(_):
+  convert_checkpoint(
+      FLAGS.estimator_type,
+      FLAGS.source_checkpoint,
+      FLAGS.source_graph,
+      FLAGS.target_checkpoint,
+  )
+
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      'estimator_type',
+      type=str,
+      choices=['dnn', 'linear', 'combined'],
+      help='The type of estimator to be converted. So far, the checkpoint '
+      'converter only supports Canned Estimator. So the allowed types '
+      'include linear, dnn and combined.')
+  parser.add_argument(
+      'source_checkpoint',
+      type=str,
+      help='Path to source checkpoint file to be read in.')
+  parser.add_argument(
+      'source_graph', type=str, help='Path to source graph file to be read in.')
+  parser.add_argument(
+      'target_checkpoint',
+      type=str,
+      help='Path to checkpoint file to be written out.')
+  FLAGS, unparsed = parser.parse_known_args()
+  tf.compat.v1.app.run(main=main, argv=[sys.argv[0]] + unparsed)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/__init__.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/_tpu_estimator_embedding.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/_tpu_estimator_embedding.py
new file mode 100644
index 00000000..03bee4fd
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/_tpu_estimator_embedding.py
@@ -0,0 +1,628 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ===================================================================
+"""Tooling to support TPU embedding in TPUEstimator."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import tensorflow as tf
+
+from tensorflow.python.feature_column import feature_column as core_fc
+from tensorflow.python.feature_column import feature_column_lib as core_fc_lib
+from tensorflow.python.feature_column import utils as fc_utils
+from tensorflow.python.framework import ops
+from tensorflow.python.ops import sparse_ops
+from tensorflow.python.tpu import feature_column as tpu_fc
+from tensorflow.python.tpu import feature_column_v2 as tpu_fc_v2
+from tensorflow.python.tpu import tpu_embedding
+from tensorflow.python.tpu.tpu_embedding import AdagradParameters
+from tensorflow.python.tpu.tpu_embedding import AdamParameters
+from tensorflow.python.tpu.tpu_embedding import FtrlParameters
+from tensorflow.python.tpu.tpu_embedding import MomentumParameters
+from tensorflow.python.tpu.tpu_embedding import RMSPropParameters
+from tensorflow.python.tpu.tpu_embedding import StochasticGradientDescentParameters
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import model_fn as model_fn_lib
+
+# pylint: disable=protected-access
+_TPU_EMBEDDING_COLUMN_CLASSES = (tpu_fc._TPUEmbeddingColumn,
+                                 tpu_fc._TPUSharedEmbeddingColumn,
+                                 tpu_fc_v2._TPUEmbeddingColumnV2,
+                                 tpu_fc_v2._TPUSharedEmbeddingColumnV2)
+_TPU_DEVICE_SPECIFIC_EMBEDDING_COLUMNS = (
+    tpu_fc_v2._TPUDeviceSpecificEmbeddingColumnV2,
+    tpu_fc_v2._TPUSharedDeviceSpecificEmbeddingColumnV2)
+_EMBEDDING_COLUMN_CLASSES = (core_fc._EmbeddingColumn,
+                             core_fc_lib.EmbeddingColumn,
+                             core_fc._SharedEmbeddingColumn)
+_SUPPORTED_FEATURE_COLUMNS = (core_fc._NumericColumn, core_fc_lib.NumericColumn)
+
+_SUPPORTED_OPTIMIZERS = (
+    AdagradParameters,
+    AdamParameters,
+    FtrlParameters,
+    StochasticGradientDescentParameters,
+    MomentumParameters,
+    RMSPropParameters,
+)
+
+# pylint: enable=protected-access
+
+_TABLE_NAME_PREFIX = 'tbl_'
+_LEN_TABLE_NAME_PREFIX = len(_TABLE_NAME_PREFIX)
+
+
+def _get_table_name_from_embedding_var_name(embedding_var_name):
+  return '{}{}'.format(_TABLE_NAME_PREFIX, embedding_var_name)
+
+
+def _get_embedding_var_name_from_table_name(table_name):
+  return table_name[_LEN_TABLE_NAME_PREFIX:]
+
+
+def _get_embedding_variable_name(scope_name, var_name):
+  if scope_name:
+    scope_name = scope_name + '/'
+  return '{}{}'.format(scope_name, var_name)
+
+
+def _get_slot_variable_names(scope_name, var_name, optimization_parameters):
+  """Return embedding variable names which are consistent with CPU runs."""
+  if scope_name:
+    scope_name = scope_name + '/'
+  if isinstance(optimization_parameters,
+                tf.compat.v1.tpu.experimental.AdagradParameters):
+    return tpu_embedding.AdagradSlotVariableName('{}{}/Adagrad'.format(
+        scope_name, var_name))
+  elif isinstance(optimization_parameters,
+                  tf.compat.v1.tpu.experimental.AdamParameters):
+    return tpu_embedding.AdamSlotVariableNames(
+        '{}{}/Adam/m'.format(scope_name, var_name),
+        '{}{}/Adam/v'.format(scope_name, var_name))
+  elif isinstance(optimization_parameters,
+                  tf.compat.v1.tpu.experimental.FtrlParameters):
+    return tpu_embedding.FtrlSlotVariableName(
+        '{}{}/Ftrl'.format(scope_name, var_name),  # accumulator
+        '{}{}/Ftrl_1'.format(scope_name, var_name))  # linear
+  elif isinstance(optimization_parameters, MomentumParameters):
+    return tpu_embedding.MomentumSlotVariableName('{}{}/Momentum'.format(
+        scope_name, var_name))
+  elif isinstance(optimization_parameters, RMSPropParameters):
+    return tpu_embedding.RMSPropSlotVariableNames(
+        ms='{}{}/RMSProp/ms'.format(scope_name, var_name),
+        mom='{}{}/RMSProp/mom'.format(scope_name, var_name),
+    )
+  elif isinstance(
+      optimization_parameters,
+      tf.compat.v1.tpu.experimental.StochasticGradientDescentParameters):
+    return None
+  else:
+    raise ValueError('Support for inferring the full variable name '
+                     'for optimization_parameter {} has not been added.'.format(
+                         optimization_parameters))
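+
+
+# For illustration (hypothetical scope and variable names): with
+# `AdamParameters`, scope 'input_layer' and variable 'emb_weights' yield the
+# slot variable names
+#   input_layer/emb_weights/Adam/m
+#   input_layer/emb_weights/Adam/v
+# matching what a CPU run of the same model would create.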
+
+
+def get_full_variable_names(graph,
+                            table_to_config_dict,
+                            optimization_parameters=None):
+  """Return embedding variable names and slot variables which are consistent with CPU runs."""
+  collection = graph.get_collection_ref(tpu_fc._TPU_FC_TO_SCOPE)  # pylint: disable=protected-access
+  if not collection:
+    raise RuntimeError(
+        'Embedding feature column did not capture anything. Make sure the '
+        'feature columns passed to the TPUEstimator constructor are properly '
+        'used in model_fn.')
+
+  embedding_variable_name_by_table = {}
+  slot_variable_names_by_table = {}
+  for table_name in table_to_config_dict:
+    embedding_var_name = _get_embedding_var_name_from_table_name(table_name)
+    (scope_name, var_name) = collection[0][embedding_var_name]
+    embedding_variable_name_by_table[table_name] = (
+        _get_embedding_variable_name(scope_name, var_name))
+    if optimization_parameters:
+      slot_variable_names_by_table[table_name] = _get_slot_variable_names(
+          scope_name, var_name, optimization_parameters)
+
+  graph.clear_collection(tpu_fc._TPU_FC_TO_SCOPE)  # pylint: disable=protected-access
+  return embedding_variable_name_by_table, slot_variable_names_by_table
+
+
+def get_configs_from_feature_columns(feature_columns):
+  """Create configs for TPUEmbedding etc from a list of feature columns.
+
+  Args:
+    feature_columns: a list of supported feature columns.
+
+  Returns:
+    A tuple of two dicts: the first maps table names to their `TableConfig`,
+    the second maps feature names to their `FeatureConfig`.
+  """
+
+  allowed = (
+      tpu_fc_v2._TPUEmbeddingColumnV2,  # pylint: disable=protected-access
+      tpu_fc_v2._TPUSharedEmbeddingColumnV2)  # pylint: disable=protected-access
+  warn = (tpu_fc._TPUEmbeddingColumn, tpu_fc._TPUSharedEmbeddingColumn)  # pylint: disable=protected-access
+
+  for column in feature_columns:
+    if not isinstance(column, allowed + warn):
+      raise TypeError(
+          'Unsupported feature column {}. Supported types are {}.'.format(
+              type(column), allowed))
+    if isinstance(column, warn):
+      tf.compat.v1.logging.warn(
+          'Columns of type {} are deprecated. Supported types are {}.'.format(
+              type(column), allowed))
+
+  table_to_config = {}
+  feature_to_config = {}
+  for column in feature_columns:
+    feature_name = column.get_feature_key_name()
+    table_name = _get_table_name_from_embedding_var_name(
+        column.get_embedding_var_name())
+    if feature_name in feature_to_config:
+      raise ValueError(
+          'Feature column {} is used with multiple embeddings and this is '
+          'not supported.'.format(feature_name))
+    feature_to_config[feature_name] = tpu_embedding.FeatureConfig(
+        table_id=table_name,
+        max_sequence_length=column.get_max_sequence_length(),
+        weight_key=column.get_weight_key_name())
+    vocabulary_size, dimension = column.get_embedding_table_size()
+    table_to_config[table_name] = tpu_embedding.TableConfig(
+        vocabulary_size=vocabulary_size,
+        dimension=dimension,
+        initializer=column.get_initializer(),
+        combiner=column.get_combiner(),
+        learning_rate_fn=column.get_learning_rate_fn())
+
+  return table_to_config, feature_to_config
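+
+
+# A minimal sketch of the shapes produced above (feature and table names are
+# hypothetical): for one embedding column over a categorical feature 'cat'
+# whose embedding variable is named 'cat_embedding', the returned dicts are
+#   table_to_config   == {'tbl_cat_embedding': TableConfig(...)}
+#   feature_to_config == {'cat': FeatureConfig(table_id='tbl_cat_embedding',
+#                                              ...)}
+# i.e. features reference tables via `table_id`, and the table name is the
+# embedding variable name with the 'tbl_' prefix.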
+
+
+@estimator_export(v1=['estimator.tpu.experimental.EmbeddingConfigSpec'])
+class EmbeddingConfigSpec(
+    collections.namedtuple('EmbeddingConfigSpec', [
+        'feature_columns', 'tensor_core_feature_columns',
+        'optimization_parameters', 'clipping_limit',
+        'pipeline_execution_with_tensor_core',
+        'experimental_gradient_multiplier_fn', 'feature_to_config_dict',
+        'table_to_config_dict', 'partition_strategy', 'profile_data_directory'
+    ])):
+  """Class to keep track of the specification for TPU embeddings.
+
+  Pass this class to `tf.estimator.tpu.TPUEstimator` via the
+  `embedding_config_spec` parameter. At minimum you need to specify
+  `feature_columns` and `optimization_parameters`. The feature columns passed
+  should be created with some combination of
+  `tf.tpu.experimental.embedding_column` and
+  `tf.tpu.experimental.shared_embedding_columns`.
+
+  TPU embeddings do not support arbitrary TensorFlow optimizers, and the
+  main optimizer you use for your model will be ignored for the embedding table
+  variables. Instead, TPU embeddings support a fixed set of predefined
+  optimizers that you can select from and set the parameters of. These include
+  Adagrad, Adam, and stochastic gradient descent. Each supported optimizer has
+  a `Parameters` class in the `tf.tpu.experimental` namespace.
+
+  ```
+  column_a = tf.feature_column.categorical_column_with_identity(...)
+  column_b = tf.feature_column.categorical_column_with_identity(...)
+  column_c = tf.feature_column.categorical_column_with_identity(...)
+  tpu_shared_columns = tf.tpu.experimental.shared_embedding_columns(
+      [column_a, column_b], 10)
+  tpu_non_shared_column = tf.tpu.experimental.embedding_column(
+      column_c, 10)
+  tpu_columns = [tpu_non_shared_column] + tpu_shared_columns
+  ...
+  def model_fn(features):
+    dense_features = tf.keras.layers.DenseFeatures(tpu_columns)
+    embedded_feature = dense_features(features)
+    ...
+
+  estimator = tf.estimator.tpu.TPUEstimator(
+      model_fn=model_fn,
+      ...
+      embedding_config_spec=tf.estimator.tpu.experimental.EmbeddingConfigSpec(
+          feature_columns=tpu_columns,
+          optimization_parameters=(
+              tf.estimator.tpu.experimental.AdagradParameters(0.1))))
+  ```
+  """
+
+  def __new__(cls,
+              feature_columns=None,
+              optimization_parameters=None,
+              clipping_limit=None,
+              pipeline_execution_with_tensor_core=False,
+              experimental_gradient_multiplier_fn=None,
+              feature_to_config_dict=None,
+              table_to_config_dict=None,
+              partition_strategy='div',
+              profile_data_directory=None):
+    """Creates an `EmbeddingConfigSpec` instance.
+
+    Args:
+      feature_columns: All embedding `FeatureColumn`s used by model.
+      optimization_parameters: An instance of `AdagradParameters`,
+        `AdamParameters` or `StochasticGradientDescentParameters`. This
+        optimizer will be applied to all embedding variables specified by
+        `feature_columns`.
+      clipping_limit: (Optional) Clipping limit (absolute value).
+      pipeline_execution_with_tensor_core: Setting this to `True` makes
+        training faster, but the trained model will be different if step N and
+        step N+1 involve the same set of embedding IDs. Please see
+        `tpu_embedding_configuration.proto` for details.
+      experimental_gradient_multiplier_fn: (Optional) A Fn taking global step as
+        input returning the current multiplier for all embedding gradients.
+      feature_to_config_dict: A dictionary mapping feature names to instances of
+        the class `FeatureConfig`. Either `feature_columns` or the pair of
+        `feature_to_config_dict` and `table_to_config_dict` must be specified.
+      table_to_config_dict: A dictionary mapping table names to instances of
+        the class `TableConfig`. Either `feature_columns` or the pair of
+        `feature_to_config_dict` and `table_to_config_dict` must be specified.
+      partition_strategy: A string determining how tensors are sharded across
+        the TPU hosts. See `tf.nn.safe_embedding_lookup_sparse` for more
+        details. Allowed values are `"div"` and `"mod"`. If `"mod"` is used,
+        evaluation and exporting the model to CPU will not work as expected.
+      profile_data_directory: Directory where embedding lookup statistics are
+        stored. These statistics summarize information about the inputs to the
+        embedding lookup operation, in particular, the average number of
+        embedding IDs per example and how well the embedding IDs are load
+        balanced across the system. The lookup statistics are used during TPU
+        initialization for embedding table partitioning. Collection of lookup
+        statistics is done at runtime by profiling the embedding inputs: only
+        3% of input samples are profiled to minimize host CPU overhead. Once
+        a suitable number of samples are profiled, the lookup statistics are
+        saved to table-specific files in the profile data directory generally
+        at the end of a TPU training loop. The filename corresponding to each
+        table is obtained by hashing table specific parameters (e.g., table
+        name and number of features) and global configuration parameters (e.g.,
+        sharding strategy and task count). The same profile data directory can
+        be shared among several models to reuse embedding lookup statistics.
+
+    Returns:
+      An `EmbeddingConfigSpec` instance.
+
+    Raises:
+      ValueError: If the feature_columns are not specified.
+      TypeError: If the feature columns are not of the correct type (one of
+        _SUPPORTED_FEATURE_COLUMNS, _TPU_EMBEDDING_COLUMN_CLASSES, or
+        _EMBEDDING_COLUMN_CLASSES).
+      ValueError: If `optimization_parameters` is not one of the required types.
+    """
+    if (not feature_columns and
+        not (feature_to_config_dict and table_to_config_dict) or
+        (feature_columns and
+         (feature_to_config_dict and table_to_config_dict))):
+      raise ValueError('Exactly one of `feature_columns` and the pair '
+                       '`feature_to_config_dict` and `table_to_config_dict` '
+                       'must be specified.')
+
+    if partition_strategy not in ('div', 'mod'):
+      raise ValueError('Invalid partition_strategy {}. Must be one of "mod" or '
+                       '"div".'.format(partition_strategy))
+
+    tensor_core_feature_columns = None
+    embedding_core_feature_columns = None
+    if feature_columns:
+      tensor_core_feature_columns = []
+      embedding_core_feature_columns = []
+      # It is unknown at this point whether the TPUEstimator is running in CPU
+      # or TPU mode, so non-TPU embedding columns are also allowed.
+      supported_classes = tuple(
+          list(_SUPPORTED_FEATURE_COLUMNS) +
+          list(_TPU_EMBEDDING_COLUMN_CLASSES) + list(_EMBEDDING_COLUMN_CLASSES))
+
+      for column in feature_columns:
+        if (isinstance(column, _TPU_DEVICE_SPECIFIC_EMBEDDING_COLUMNS) and
+            (column._embedding_lookup_device ==  # pylint: disable=protected-access
+             tpu_fc_v2.EmbeddingDevice.TPU_TENSOR_CORE)):
+          tensor_core_feature_columns.append(column)
+        else:
+          embedding_core_feature_columns.append(column)
+        if not isinstance(column, supported_classes):
+          raise TypeError(
+              'All feature columns must be of a supported type in {}. Got {}'
+              .format(supported_classes, type(column)))
+
+      if not isinstance(optimization_parameters, _SUPPORTED_OPTIMIZERS):
+        raise ValueError('optimization_parameters must be an instance of type '
+                         '{}. Got {}.'.format(_SUPPORTED_OPTIMIZERS,
+                                              type(optimization_parameters)))
+    else:
+      for feature, config in feature_to_config_dict.items():
+        if not isinstance(config, tpu_embedding.FeatureConfig):
+          raise TypeError(
+              'Config for feature {} must be of type `FeatureConfig`. Got {}'
+              .format(feature, type(config)))
+        if config.table_id not in table_to_config_dict:
+          raise ValueError('Feature {} refers to table {} which is not in the '
+                           'table_to_config_dict.'.format(
+                               feature, config.table_id))
+      for table, config in table_to_config_dict.items():
+        if not isinstance(config, tpu_embedding.TableConfig):
+          raise TypeError(
+              'Config for table {} must be of type `TableConfig`. Got '
+              '{}'.format(table, type(config)))
+
+    return super(EmbeddingConfigSpec, cls).__new__(
+        cls,
+        feature_columns=embedding_core_feature_columns,
+        tensor_core_feature_columns=tensor_core_feature_columns,
+        optimization_parameters=optimization_parameters,
+        clipping_limit=clipping_limit,
+        pipeline_execution_with_tensor_core=pipeline_execution_with_tensor_core,
+        experimental_gradient_multiplier_fn=experimental_gradient_multiplier_fn,
+        feature_to_config_dict=feature_to_config_dict,
+        table_to_config_dict=table_to_config_dict,
+        partition_strategy=partition_strategy,
+        profile_data_directory=profile_data_directory)
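+
+  # A minimal construction sketch using the dict-based path (names and sizes
+  # are hypothetical), as an alternative to passing feature columns:
+  #
+  #   spec = EmbeddingConfigSpec(
+  #       feature_to_config_dict={
+  #           'cat': tpu_embedding.FeatureConfig(table_id='tbl_cat')},
+  #       table_to_config_dict={
+  #           'tbl_cat': tpu_embedding.TableConfig(vocabulary_size=100,
+  #                                                dimension=8)},
+  #       optimization_parameters=AdagradParameters(learning_rate=0.1))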
+
+
+class EmbeddingConfig(object):
+  """This is the internal immutable object for the embedding config.
+
+  `EmbeddingConfig` is responsible for _translating_ the user-provided
+  `EmbeddingConfigSpec` into internal data structures, mostly constructor
+  arguments of `TPUEmbedding`.
+  """
+
+  def __init__(self, embedding_config_spec, train_batch_size, eval_batch_size,
+               num_hosts, num_cores, run_config):
+    if not embedding_config_spec:
+      raise ValueError('embedding_config_spec cannot be None.')
+
+    self._embedding_config_spec = embedding_config_spec
+    self._train_batch_size = train_batch_size
+    self._eval_batch_size = eval_batch_size
+    self._num_hosts = num_hosts
+    self._num_cores = num_cores
+    self._run_config = run_config
+
+    if embedding_config_spec.feature_columns:
+      self._table_to_config_dict, self._feature_to_config_dict = (
+          get_configs_from_feature_columns(
+              embedding_config_spec.feature_columns))
+    else:
+      self._table_to_config_dict = embedding_config_spec.table_to_config_dict
+      self._feature_to_config_dict = embedding_config_spec.feature_to_config_dict
+    self._partition_strategy = embedding_config_spec.partition_strategy
+    self._mode_to_tpu_embedding_dict = {}
+    self.dummy_table_variables = None
+
+    self._grad_multiplier_fn = (
+        embedding_config_spec.experimental_gradient_multiplier_fn)
+
+  def get_grad_multiplier(self):
+    if self._grad_multiplier_fn:
+      return ops.convert_to_tensor(
+          self._grad_multiplier_fn(tf.compat.v1.train.get_global_step()),
+          dtype=tf.dtypes.float32)
+
+  def has_embedding_tables(self):
+    return bool(self._table_to_config_dict)
+
+  def _create_tpu_embedding(self, mode):
+    """Create tpu_embedding.TPUEmbedding based on mode."""
+    if mode == model_fn_lib.ModeKeys.TRAIN:
+      batch_size = self._train_batch_size
+    else:
+      batch_size = self._eval_batch_size
+
+    if mode == model_fn_lib.ModeKeys.TRAIN:
+      tpu_embedding_mode = tpu_embedding.TRAINING
+      optimization_parameters = (
+          self._embedding_config_spec.optimization_parameters)
+    elif (mode == model_fn_lib.ModeKeys.EVAL or
+          mode == model_fn_lib.ModeKeys.PREDICT):
+      tpu_embedding_mode = tpu_embedding.INFERENCE
+      optimization_parameters = None
+    else:
+      raise ValueError('Mode {} is not supported.'.format(mode))
+
+    if self._run_config.cluster:
+      master = self._run_config.cluster.master()
+      cluster_spec = self._run_config.cluster.cluster_spec()
+      cluster_def = cluster_spec.as_cluster_def() if cluster_spec else None
+    else:
+      master = (
+          self._run_config.evaluation_master
+          if mode == model_fn_lib.ModeKeys.EVAL else self._run_config.master)
+      cluster_def = None
+    master_job_name = None
+    if self._run_config.tpu_config.tpu_job_name is not None:
+      master_job_name = self._run_config.tpu_config.tpu_job_name
+    tpu_embedding_ = tpu_embedding.TPUEmbedding(
+        self._table_to_config_dict,
+        self._feature_to_config_dict,
+        batch_size,
+        tpu_embedding_mode,
+        master,
+        optimization_parameters,
+        cluster_def,
+        pipeline_execution_with_tensor_core=self._embedding_config_spec
+        .pipeline_execution_with_tensor_core,
+        partition_strategy=self._partition_strategy,
+        profile_data_directory=self._embedding_config_spec
+        .profile_data_directory,
+        master_job_name=master_job_name)
+    return tpu_embedding_
+
+  def get_tpu_embedding(self, mode):
+    if mode not in self._mode_to_tpu_embedding_dict:
+      self._mode_to_tpu_embedding_dict[mode] = (
+          self._create_tpu_embedding(mode))
+    return self._mode_to_tpu_embedding_dict[mode]
+
+
+def _maybe_dense_to_sparse(tensor):
+  """Possibly convert a dense (rank 1 or 2) tensor to a SparseTensor."""
+  # If already sparse, return as is.
+  if isinstance(tensor, tf.sparse.SparseTensor):
+    return tensor
+  indices = tf.compat.v1.where(tensor)
+  values = tf.compat.v1.gather_nd(tensor, indices)
+  shape = tf.compat.v1.shape(tensor, out_type=tf.dtypes.int64)
+  return tf.sparse.SparseTensor(indices, values, shape)
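+
+
+# Illustrative behavior (hypothetical values): the dense rank-2 tensor
+# [[0., 7.], [3., 0.]] converts to a SparseTensor with
+#   indices=[[0, 1], [1, 0]], values=[7., 3.], dense_shape=[2, 2],
+# since `tf.compat.v1.where` keeps only the nonzero entries.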
+
+
+def split_inputs(ctx, features, labels, num_cores_per_batch=1):
+  """Splits the dense and sparse tensors inside the features and labels."""
+  enqueue_datas = collections.OrderedDict()
+
+  if ctx.embedding_config:
+    tpu_embedding_ = ctx.embedding_config.tpu_embedding
+    for feature_key in tpu_embedding_.feature_to_config_dict:
+      sparse_feature = _get_sparse_feature_from_feature(feature_key, features)
+      max_sequence_length = tpu_embedding_.feature_to_config_dict[
+          feature_key].max_sequence_length
+      combiner = tpu_embedding_._table_to_config_dict[
+          tpu_embedding_._feature_to_config_dict[feature_key].table_id].combiner
+      if max_sequence_length > 0:
+        length_feature_name = (
+            tpu_fc.get_sequence_length_feature_key_name_from_feature_key_name(
+                feature_key))
+        length_feature = tf.math.minimum(
+            fc_utils.sequence_length_from_sparse_tensor(sparse_feature),
+            max_sequence_length)
+        length_feature.set_shape(ctx.batch_size_for_input_fn)
+        features[length_feature_name] = length_feature
+      weight_key = tpu_embedding_.feature_to_config_dict[feature_key].weight_key
+      sparse_feature_split = _split_tensor(sparse_feature, num_cores_per_batch)
+      if combiner is None and not isinstance(sparse_feature,
+                                             tf.sparse.SparseTensor):
+        # A dense tensor with no combiner was provided so we assume that each
+        # of the embedding_indices belongs to a different sample (setting
+        # sample_indices to None).
+        if weight_key is not None:
+          raise ValueError(
+              'Found weights {} for weighted_categorical_column, which is not '
+              'compatible with sparse feature {} enqueued as dense tensor.'
+              .format(weight_key, feature_key))
+        enqueue_data = []
+        for i in range(num_cores_per_batch):
+          enqueue_data.append(
+              tpu_embedding.EnqueueData(sparse_feature_split[i]))
+      else:
+        weights = None
+        if isinstance(sparse_feature, tf.sparse.SparseTensor):
+          weights = _get_weights_from_features(weight_key, features)
+          weights_split = _split_tensor(weights, num_cores_per_batch)
+        enqueue_data = []
+        for i in range(num_cores_per_batch):
+          split_weights = weights_split[i] if weights is not None else None
+          enqueue_data.append(
+              tpu_embedding.EnqueueData.from_sparse_tensor(
+                  _maybe_dense_to_sparse(sparse_feature_split[i]),
+                  weights=split_weights))
+      enqueue_datas[feature_key] = enqueue_data
+  if ctx.tensor_core_embedding_columns:
+    # pylint: disable=protected-access
+    for column in ctx.tensor_core_embedding_columns:
+      feature_key = column.categorical_column.key
+      sparse_feature = _get_sparse_feature_from_feature(feature_key, features)
+      padded_values, padded_mask = (
+          tpu_fc_v2.pad_sparse_embedding_lookup_indices(
+              sparse_feature, column._tensor_core_shape[1]))
+      padded_values.set_shape(
+          [ctx.batch_size_for_input_fn, column._tensor_core_shape[1]])
+      padded_mask.set_shape(
+          [ctx.batch_size_for_input_fn, column._tensor_core_shape[1]])
+      features[feature_key] = padded_values
+      mask_key = feature_key + tpu_fc_v2._TENSOR_CORE_MASK_KEY_SUFFIX
+      if mask_key in features:
+        raise ValueError('Mask key {} for Tensor Core embedding is '
+                         'already in use.'.format(mask_key))
+      features[mask_key] = padded_mask
+    # pylint: enable=protected-access
+
+  # Transpose the enqueue_datas dict into a list of dicts
+  enqueue_datas_list = []
+  for i in range(num_cores_per_batch):
+    enqueue_data = {}
+    for key, value in enqueue_datas.items():
+      enqueue_data[key] = value[i]
+    enqueue_datas_list.append(enqueue_data)
+  return features, labels, enqueue_datas_list
+
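+# Hedged illustration of the transpose at the end of split_inputs (names
+# assumed): with num_cores_per_batch=2 and enqueue_datas
+# {'a': [a0, a1], 'b': [b0, b1]}, the returned enqueue_datas_list is
+# [{'a': a0, 'b': b0}, {'a': a1, 'b': b1}], one dict per core.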
+
+def _split_tensor(tensor, num_splits):
+  """Splits tensor into num_splits pieces, returns a list of pieces."""
+  if tensor is None:
+    return [None] * num_splits
+  elif num_splits <= 0:
+    raise ValueError(
+        'Tensors cannot be split into {} pieces.'.format(num_splits))
+  elif num_splits == 1:
+    return [tensor]
+  elif isinstance(tensor, tf.sparse.SparseTensor):
+    return sparse_ops.sparse_split_v2(tensor, num_splits, axis=0)
+  else:
+    return tf.split(tensor, num_splits)
+
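+# Hedged worked example for _split_tensor (shapes assumed): a dense tensor of
+# shape [8, 16] with num_splits=2 yields two tensors of shape [4, 16]; a
+# SparseTensor is instead split along axis 0 via sparse_split_v2.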
+
+def _get_sparse_feature_from_feature(feature_key, features):
+  """Pop and return sparse feature."""
+  sparse_feature = features.pop(feature_key)
+  if not sparse_feature.dtype.is_integer:
+    raise ValueError('SparseTensors with string values are not supported. '
+                     'If you are using categorical_column_with_vocabulary_file '
+                     'or categorical_column_with_vocabulary_list, please call '
+                     'your_column.categorical_column._transform_feature({{'
+                     'your_column.key: features[your_column.key]}}) in '
+                     'your input_fn() to convert string to int. '
+                     'feature_key = {}.'.format(feature_key))
+  return sparse_feature
+
+
+def _get_weights_from_features(weight_key_name, features):
+  """Pop and return feature for weights, possibly None."""
+  weights = None
+  if weight_key_name is not None:
+    if weight_key_name in features:
+      weights = features.pop(weight_key_name)
+    else:
+      raise ValueError(
+          'Cannot find weights {} for weighted_categorical_column.'
+          ' Please check if the weights are present in feature dict. Also'
+          ' note weight-sharing among weighted_categorical_column is not '
+          'supported on TPU.'.format(weight_key_name))
+    if not isinstance(weights, tf.sparse.SparseTensor):
+      raise ValueError(
+          'weighted_categorical_column with weight key name {} has dense '
+          'weights. Dense weights are not supported on TPU. Please use '
+          'sparse weights instead.'.format(weight_key_name))
+    if weights.dtype is not tf.dtypes.float32:
+      weights = tf.cast(weights, dtype=tf.dtypes.float32)
+  return weights
+
+
+def get_tpu_embedding_columns(feature_columns):
+  """Get feature columns meant to use TPU embedding.
+
+  Args:
+    feature_columns: a list of feature columns.
+
+  Returns:
+    A list of feature columns which can be placed on TPU embedding.
+  """
+  tpu_embedding_columns = []
+  for column in feature_columns:
+    if isinstance(column, _TPU_EMBEDDING_COLUMN_CLASSES):
+      tpu_embedding_columns.append(column)
+  return tpu_embedding_columns
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/error_handling.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/error_handling.py
new file mode 100644
index 00000000..97e0e27d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/error_handling.py
@@ -0,0 +1,154 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ===================================================================
+"""ErrorRendezvous handler for collecting errors from multiple threads."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import contextlib
+import sys
+import threading
+import time
+
+import six
+import tensorflow as tf
+from tensorflow_estimator.python.estimator.tools import analytics
+
+_UNINTERESTING_ERRORS = (tf.errors.CancelledError,)
+_IGNORED_ERRORS = (
+    tf.errors.AbortedError,
+    tf.errors.UnavailableError,
+)
+
+_CHECK_NUMERIC_OP_NAME = 'CheckNumerics'
+
+
+class ErrorRendezvous(object):
+  """Resolve errors from multiple threads during TPU execution.
+
+  TPU errors can occur on the infeed or outfeed threads as well as the main
+  training thread.
+
+  Depending on which thread "wins" and receives the session error first, we may
+  end up showing users a confusing and non-actionable error message (session
+  cancelled) instead of a root cause (e.g. a bad filename).
+
+  The rendezvous object provides a location to capture these errors until all
+  threads terminate.  At that point we can choose the most informative error
+  to report.
+  """
+
+  def __init__(self, num_sources):
+    # string -> (message, traceback)
+    self._errors = {}
+    self._num_sources = num_sources
+    self._session_cancel_timer = None
+
+  def record_error(self, source, exc_info, session=None):
+    """Report an exception from the given source.
+
+    If a session is passed, a timer will be registered to close it after a few
+    seconds.  This is necessary to ensure the main training loop does not hang
+    if an infeed/outfeed error occurs.  We sleep a few seconds to allow a more
+    interesting error from another thread to propagate.
+
+    Args:
+      source: string, source of the error
+      exc_info: Output from `sys.exc_info` (type, value, traceback)
+      session: Session to close after delay.
+    """
+    _, value, _ = exc_info
+    # Ignore errors already handled by MonitoredSession
+    if isinstance(value, _IGNORED_ERRORS):
+      return
+
+    self._errors[source] = exc_info
+
+    # If the error is a numeric type, e.g., NaN error, we can assume that the
+    # loop execution completed successfully. In this case, we can skip the
+    # `session.close()` logic and wait for the infeed/outfeed threads to
+    # complete as normal.
+    try:
+      if value.op.type == _CHECK_NUMERIC_OP_NAME:
+        analytics.track_numerical_issues(exc_info)
+        return
+    except AttributeError:
+      pass
+
+    if session is not None and self._session_cancel_timer is None:
+
+      def _cancel_session():
+        time.sleep(5)
+        tf.compat.v1.logging.error('Closing session due to error %s' % value)
+        try:
+          session.close()
+        except:  # pylint: disable=bare-except
+          tf.compat.v1.logging.error(
+              '\n\n\nFailed to close session after error. '
+              'Other threads may hang.\n\n\n')
+
+      self._session_cancel_timer = threading.Thread(target=_cancel_session)
+      self._session_cancel_timer.daemon = True
+      self._session_cancel_timer.start()
+
+  def record_done(self, source):
+    """Mark execution source `source` as done.
+
+    If an error was originally reported from `source` it is left intact.
+
+    Args:
+      source: `str`, source being recorded
+    """
+    tf.compat.v1.logging.info('%s marked as finished', source)
+    if source not in self._errors:
+      self._errors[source] = None
+
+  @contextlib.contextmanager
+  def catch_errors(self, source, session=None):
+    """Context manager to report any errors within a block."""
+    try:
+      yield
+    except Exception:  # pylint: disable=broad-except
+      self.record_error(source, sys.exc_info(), session)
+
+  def raise_errors(self, timeout_sec=0):
+    """Wait for up to `timeout` seconds for all error sources to finish.
+
+    Preferentially raise "interesting" errors (errors not in the
+    _UNINTERESTING_ERRORS) set.
+
+    Args:
+      timeout_sec: Seconds to wait for other error sources.
+    """
+    for _ in range(timeout_sec):
+      if len(self._errors) == self._num_sources:
+        break
+      time.sleep(1)
+
+    kept_errors = [(k, v) for (k, v) in self._errors.items() if v is not None]
+
+    # First check for any interesting errors, then fall back on the session
+    # cancelled errors etc.
+    for k, (typ, value, traceback) in kept_errors:
+      if isinstance(value, _UNINTERESTING_ERRORS):
+        continue
+      else:
+        tf.compat.v1.logging.warn('Reraising captured error')
+        six.reraise(typ, value, traceback)
+
+    for k, (typ, value, traceback) in kept_errors:
+      tf.compat.v1.logging.warn('Reraising captured error')
+      six.reraise(typ, value, traceback)
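+
+# Hedged usage sketch (illustrative only; `sess` and `run_infeed` are assumed
+# placeholders, not part of this module):
+#   rendezvous = ErrorRendezvous(num_sources=1)
+#   with rendezvous.catch_errors(source='infeed', session=sess):
+#     run_infeed()
+#   rendezvous.record_done('infeed')
+#   rendezvous.raise_errors(timeout_sec=30)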
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/iteration_count_estimator.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/iteration_count_estimator.py
new file mode 100644
index 00000000..ea231fb1
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/iteration_count_estimator.py
@@ -0,0 +1,201 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+"""Estimator that uses past runtime samples to estimate iterations count.
+
+The estimator helps determine the number of iterations to run within a given
+allotted time budget. The estimate is adjusted over time as the estimator
+collects more per-iteration runtime samples.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+
+import numpy as np
+import tensorflow as tf
+
+RuntimeCounter = collections.namedtuple(
+    "RuntimeCounter", ["runtime_secs", "steps", "step_time_secs"])
+
+
+class IterationCountEstimator(object):
+  """Estimates iterations count using past iterations runtime.
+
+  The estimator collects per-iteration elapsed times (in seconds) and stores
+  them in a circular buffer. Once it has collected enough samples, it computes
+  the mean of the past observed iteration times to estimate the number of
+  iterations to run within the allotted time budget in seconds.
+
+  To keep the buffer from growing indefinitely, its size is bounded by virtue
+  of using a circular buffer: once the buffer fills up, older values are
+  dequeued in FIFO order. Because the estimate is computed from the mean of
+  the iteration runtimes, a larger buffer size smooths out the estimation and
+  makes the estimator less sensitive to runtime fluctuations, at the cost of
+  slower convergence. A smaller buffer size converges faster but is more prone
+  to runtime fluctuations.
+
+  As a safety feature, the estimator will return default iterations value,
+  when:
+  1. The circular buffer is empty (initially).
+  2. The user input is invalid.
+  """
+
+  def __init__(self, capacity=20):
+    """Constructs a new `IterationsEstimator` instance.
+
+    Args:
+      capacity: Size of the circular buffer that holds timer values. Each
+        timer value represents the time spent on a past run of iterations.
+
+    Raises:
+      ValueError: If one or more of the specified parameters is invalid.
+    """
+    self._reset(capacity=capacity)
+
+  def _reset(self, capacity=20):
+    """Resets internal variables."""
+    if capacity <= 0:
+      raise ValueError("IterationCountEstimator `capacity` must be positive. "
+                       "Actual:%d." % capacity)
+    # A circular buffer with fixed capacity to store the observed time values;
+    # once the buffer is full, the oldest value is evicted.
+    self._buffer_wheel = collections.deque([])
+    self._capacity = capacity
+    self._min_iterations = 1
+    self._last_iterations = self._min_iterations
+    self._sample_count = 0
+
+  def _mean_runtime_secs(self):
+    return np.mean(self._buffer_wheel, axis=0)[0] if self._buffer_wheel else 0
+
+  def _mean_step_time_secs(self):
+    return np.mean(self._buffer_wheel, axis=0)[2] if self._buffer_wheel else 0
+
+  def _std_step_time_secs(self):
+    return np.std(self._buffer_wheel, axis=0)[2] if self._buffer_wheel else 0
+
+  def _diff_less_than_percentage(self, actual, target, percentage):
+    """Checks if `actual` value is within a `percentage` to `target` value.
+
+    Args:
+      actual: Actual value.
+      target: Target value.
+      percentage: Max percentage threshold.
+
+    Returns:
+      True if ABS(`actual` - `target`) is within `percentage` percent of
+      `target`, otherwise False.
+
+    Raises:
+      ValueError: If `actual` or `target` is zero.
+    """
+    if actual == 0:
+      raise ValueError("Invalid `actual` value. Value must not be zero.")
+    if target == 0:
+      raise ValueError("Invalid `target` value. Value must not be zero.")
+    return (float(abs(target - actual)) / target) <= percentage * 0.01
+
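+  # Hedged worked example for _diff_less_than_percentage above (values
+  # assumed): actual=95, target=100, percentage=10 gives
+  # |100 - 95| / 100 = 0.05 <= 0.10, so the check returns True.
+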
+  def _is_step_time_stable(self):
+    """Checks if the step time has stabilized.
+
+    We define stability as a small standard deviation of the step time after
+    the estimator has been running for some time.
+
+    Returns:
+      True if stability is reached, False otherwise.
+    """
+    std = self._std_step_time_secs()
+    return std < 0.03 and self._sample_count > self._capacity
+
+  def update(self, runtime_secs, count):
+    """Updates the unit time spent per iteration.
+
+    Args:
+      runtime_secs: The total elapsed time in seconds.
+      count: The number of iterations.
+    """
+    if runtime_secs <= 0.0:
+      tf.compat.v1.logging.debug(
+          "Invalid `runtime_secs`. Value must be positive. Actual:%.3f.",
+          runtime_secs)
+      return
+    if count <= 0.0:
+      tf.compat.v1.logging.debug(
+          "Invalid samples `count`. Value must be positive. Actual:%d.", count)
+      return
+
+    if len(self._buffer_wheel) >= self._capacity:
+      self._buffer_wheel.popleft()
+    step_time_secs = float(runtime_secs) / count
+    self._buffer_wheel.append(
+        RuntimeCounter(
+            runtime_secs=runtime_secs,
+            steps=count,
+            step_time_secs=step_time_secs))
+    self._sample_count += 1
+
+  def get(self, total_secs):
+    """Gets the iterations count estimate.
+
+    If recent predicted iterations are stable, re-use the previous value.
+    Otherwise, update the prediction value based on the delta between the
+    current prediction and the expected number of iterations as determined by
+    the per-step runtime.
+
+    Args:
+      total_secs: The target runtime in seconds.
+
+    Returns:
+      The estimated number of iterations.
+
+    Raises:
+      ValueError: If `total_secs` is not positive.
+    """
+    if total_secs <= 0:
+      raise ValueError(
+          "Invalid `total_secs`. It must be positive number. Actual:%d" %
+          total_secs)
+    if not self._buffer_wheel:
+      tf.compat.v1.logging.debug(
+          "IterationCountEstimator has no sample(s). Returns min iterations:%d.",
+          self._min_iterations)
+      return self._min_iterations
+
+    mean_runtime_secs = self._mean_runtime_secs()
+    mean_step_time_secs = self._mean_step_time_secs()
+    std_step_time_secs = self._std_step_time_secs()
+    projected_iterations = total_secs / mean_step_time_secs
+    last_runtime_secs = self._buffer_wheel[-1].runtime_secs
+    delta_iterations = projected_iterations - self._last_iterations
+    # Stabilize the search once it is close enough to the target runtime and
+    # the step time is stable within the bound.
+    if ((self._diff_less_than_percentage(last_runtime_secs, total_secs, 10) or
+         self._diff_less_than_percentage(mean_runtime_secs, total_secs, 5)) and
+        self._is_step_time_stable()):
+      delta_iterations = 0
+    self._last_iterations += delta_iterations
+    self._last_iterations = max(self._last_iterations, self._min_iterations)
+    tf.compat.v1.logging.info(
+        "IterationCountEstimator -- target_runtime:%.3fs. last_runtime:%.3fs. "
+        "mean_runtime:%.3fs. last_step_time:%.3f. std_step_time:%.3f. "
+        "mean_step_time:%.3fs. delta_steps:%.2f. prev_steps:%.2f. "
+        "next_steps:%.2f.", total_secs, last_runtime_secs, mean_runtime_secs,
+        self._buffer_wheel[-1].step_time_secs, std_step_time_secs,
+        mean_step_time_secs, delta_iterations, self._buffer_wheel[-1].steps,
+        self._last_iterations)
+    return int(self._last_iterations + 0.5)
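+
+# Hedged usage sketch (illustrative only, not part of the original module):
+#   estimator = IterationCountEstimator(capacity=20)
+#   estimator.update(runtime_secs=10.0, count=100)  # observed 0.1s per step
+#   steps = estimator.get(total_secs=60)            # roughly 60 / 0.1 = 600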
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_config.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_config.py
new file mode 100644
index 00000000..06427feb
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_config.py
@@ -0,0 +1,329 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ===================================================================
+"""A RunConfig subclass with TPU support."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import json
+import os
+import tensorflow as tf
+from tensorflow.core.protobuf import config_pb2
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import run_config as run_config_lib
+from tensorflow_estimator.python.estimator.tpu import util as util_lib
+
+# pylint: disable=protected-access
+_TF_CONFIG_ENV = run_config_lib._TF_CONFIG_ENV
+_SERVICE_KEY = run_config_lib._SERVICE_KEY
+_TPU_WORKER_JOB_NAME = 'tpu_worker_job_name'
+# pylint: enable=protected-access
+
+
+@estimator_export(v1=['estimator.tpu.InputPipelineConfig'])
+class InputPipelineConfig(object):
+  r"""Please see the definition of these values in TPUConfig."""
+  PER_SHARD_V1 = 1
+  PER_HOST_V1 = 2
+  PER_HOST_V2 = 3
+  BROADCAST = 4
+  SLICED = 5
+
+
+@estimator_export(v1=['estimator.tpu.TPUConfig'])
+class TPUConfig(
+    collections.namedtuple('TPUConfig', [
+        'iterations_per_loop',
+        'num_shards',
+        'num_cores_per_replica',
+        'per_host_input_for_training',
+        'tpu_job_name',
+        'initial_infeed_sleep_secs',
+        'input_partition_dims',
+        'eval_training_input_configuration',
+        'experimental_host_call_every_n_steps',
+        'experimental_allow_per_host_v2_parallel_get_next',
+        'experimental_feed_hook',
+    ])):
+  r"""TPU related configuration required by `TPUEstimator`.
+
+  Args:
+    iterations_per_loop: This is the number of train steps running in the TPU
+      system before returning to the CPU host for each `Session.run`. This
+      means the global step is increased `iterations_per_loop` times in one
+      `Session.run`. It is recommended to set this to the number of global
+      steps until the next checkpoint. Note that evaluation does not use this
+      value; instead, the total number of eval `steps` is run on the TPU in a
+      single `Session.run`.
+      [Experimental]: `iterations_per_loop` can be specified as a time interval.
+        To specify N seconds in one `Session.run`, one can specify it as `Ns`,
+        substituting N with the desired number of seconds. Alternatively, the
+        unit of time can also be specified in minutes or hours, e.g. `3600s`,
+        `60m`, or `1h`.
+    num_shards: (Deprecated, ignored by TPUEstimator). The number of model
+      replicas in the system. For non-model-parallelism case, this number equals
+      the total number of TPU cores. For model-parallelism, the total number of
+      TPU cores equals num_cores_per_replica * num_shards.
+    num_cores_per_replica: Defaults to `None`, which disables model parallelism.
+      An integer which describes the number of TPU cores per model replica. This
+      is required by model-parallelism which enables partitioning the model to
+      multiple cores. Currently num_cores_per_replica must be 1, 2, 4, 8, 16,
+      32, 64, or 128.
+    per_host_input_for_training: If `True`, for `PER_HOST_V1`, the `input_fn` is
+      invoked once on each host, and the number of hosts must be smaller or
+      equal to the number of replicas. For PER_HOST_V2, the `input_fn` is
+      invoked once for each host (if the number of hosts is less than the number
+      of replicas) or replica (if the number of replicas is less than the number
+      of hosts. With the per-core input pipeline configuration, it is invoked
+      once for each core. With a global batch size `train_batch_size` in
+      `TPUEstimator` constructor, the batch size for each shard is
+      `train_batch_size` // #hosts in the `True` or `PER_HOST_V1` mode. In
+      `PER_HOST_V2` mode, it is `train_batch_size` // #cores. In `BROADCAST`
+      mode, `input_fn` is only invoked once on host 0 and the tensors are
+      broadcasted to all other replicas. The batch size equals
+      `train_batch_size`. With the per-core input pipeline configuration, the
+      shard batch size is also `train_batch_size` // #cores.
+      Note: per_host_input_for_training==PER_SHARD_V1 only supports mode.TRAIN.
+    tpu_job_name: The name of the TPU job. Typically, this name is auto-inferred
+      within TPUEstimator, however when using ClusterSpec propagation in more
+      esoteric cluster configurations, you may need to specify the job name as a
+      string.
+    initial_infeed_sleep_secs: The number of seconds the infeed thread should
+      wait before enqueueing the first batch. This helps avoid timeouts for
+      models that require a long compilation time.
+    input_partition_dims: A nested list to describe the partition dims for all
+      the tensors from input_fn(). The structure of input_partition_dims must
+      match the structure of `features` and `labels` from input_fn(). The total
+      number of partitions must match
+      `num_cores_per_replica`. For example, if input_fn() returns two tensors,
+        images with shape [N, H, W, C] and labels with shape [N], then
+        input_partition_dims = [[1, 2, 2, 1], None] will split the images into
+        4 pieces and feed them into 4 TPU cores. The labels tensor is directly
+        broadcast to all the TPU cores since its partition dims are `None`.
+      Current limitations: This feature is only supported with the PER_HOST_V2
+        input mode.
+    eval_training_input_configuration: If `SLICED`, `input_fn` is only invoked
+      once on host 0 and the tensors are broadcasted to all other replicas.
+      Unlike per_host_input_for_training=BROADCAST, each replica will only get a
+      slice of the data instead of a whole copy. If `PER_HOST_V1`, the behaviour
+      is determined by per_host_input_for_training.
+    experimental_host_call_every_n_steps: Within a training loop, this argument
+      sets how often host calls are performed during training. Host calls will
+      be evaluated every n steps within a training loop where n is the value of
+      this argument.
+    experimental_allow_per_host_v2_parallel_get_next: When enabled, allows
+      concurrent execution of dataset get next calls when using PER_HOST_V2
+      input. May result in a performance increase for models with a small step
+      time, but as a consequence TPUEstimator may non-deterministically
+      distribute batches to different cores, rather than guaranteeing round
+      robin behavior.
+    experimental_feed_hook: A class the user can provide to the TPU estimator
+      to override the default TPUInfeedOutfeedSessionHook implementation with
+      a customized implementation of the infeed/outfeed logic. If None, the
+      TPU estimator uses the default TPUInfeedOutfeedSessionHook implementation
+      in tpu_estimator.py. If not None, the TPU estimator uses this customized
+      TPU infeed/outfeed session hook class instead of the default one.
+
+  Raises:
+      ValueError: If `num_cores_per_replica` is not 1, 2, 4, 8, ..., 128.
+  """
+
+  def __new__(cls,
+              iterations_per_loop=2,
+              num_shards=None,
+              num_cores_per_replica=None,
+              per_host_input_for_training=True,
+              tpu_job_name=None,
+              initial_infeed_sleep_secs=None,
+              input_partition_dims=None,
+              eval_training_input_configuration=InputPipelineConfig.PER_HOST_V1,
+              experimental_host_call_every_n_steps=1,
+              experimental_allow_per_host_v2_parallel_get_next=False,
+              experimental_feed_hook=None):
+
+    # Check iterations_per_loop.
+    util_lib.parse_iterations_per_loop(iterations_per_loop)
+
+    # Check num_shards.
+    if num_shards is not None:
+      util_lib.check_positive_integer(num_shards, 'TPUConfig num_shards')
+
+    if input_partition_dims is not None:
+      if len(input_partition_dims) != 1 and len(input_partition_dims) != 2:
+        raise ValueError(
+            'input_partition_dims must be a list/tuple with one or two'
+            ' elements.')
+
+      if per_host_input_for_training is not InputPipelineConfig.PER_HOST_V2:
+        raise ValueError(
+            'input_partition_dims is only supported in PER_HOST_V2 mode.')
+
+      if num_cores_per_replica is None:
+        raise ValueError(
+            'input_partition_dims requires setting num_cores_per_replica.')
+
+    # Check num_cores_per_replica
+    if num_cores_per_replica is not None:
+      if num_cores_per_replica not in ([1, 2, 4, 8, 16, 32, 64, 128]):
+        raise ValueError(
+            'num_cores_per_replica must be 1, 2, 4, 8, 16, 32, 64, 128; '
+            'got {}'.format(str(num_cores_per_replica)))
+
+    if eval_training_input_configuration not in [
+        InputPipelineConfig.PER_HOST_V1, InputPipelineConfig.SLICED
+    ]:
+      raise ValueError(
+          'eval_training_input_configuration must be PER_HOST_V1 or SLICED;'
+          ' got {}'.format(str(eval_training_input_configuration)))
+
+    # per_host_input_for_training may be True, False, or an InputPipelineConfig
+    # value. Map legacy values (True, False) to numeric values.
+    if per_host_input_for_training is False:
+      per_host_input_for_training = InputPipelineConfig.PER_SHARD_V1
+    elif per_host_input_for_training is True:
+      per_host_input_for_training = InputPipelineConfig.PER_HOST_V1
+
+    # Check initial_infeed_sleep_secs.
+    if initial_infeed_sleep_secs:
+      util_lib.check_positive_integer(initial_infeed_sleep_secs,
+                                      'TPUConfig initial_infeed_sleep_secs')
+
+    tpu_job_name = tpu_job_name or _get_tpu_job_name_from_tf_config()
+
+    return super(TPUConfig, cls).__new__(
+        cls,
+        iterations_per_loop=iterations_per_loop,
+        num_shards=num_shards,
+        num_cores_per_replica=num_cores_per_replica,
+        per_host_input_for_training=per_host_input_for_training,
+        tpu_job_name=tpu_job_name,
+        initial_infeed_sleep_secs=initial_infeed_sleep_secs,
+        input_partition_dims=input_partition_dims,
+        eval_training_input_configuration=eval_training_input_configuration,
+        experimental_host_call_every_n_steps=(
+            experimental_host_call_every_n_steps),
+        experimental_allow_per_host_v2_parallel_get_next=(
+            experimental_allow_per_host_v2_parallel_get_next),
+        experimental_feed_hook=(experimental_feed_hook))
+
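+# Hedged usage sketch (illustrative only; the values are assumptions):
+#   tpu_config = TPUConfig(iterations_per_loop='60s',  # time-based loop budget
+#                          num_cores_per_replica=8,
+#                          per_host_input_for_training=True)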
+
+@estimator_export(v1=['estimator.tpu.RunConfig'])
+class RunConfig(run_config_lib.RunConfig):
+  """RunConfig with TPU support."""
+
+  def __init__(self,
+               tpu_config=None,
+               evaluation_master=None,
+               master=None,
+               cluster=None,
+               **kwargs):
+    """Constructs a RunConfig.
+
+    Args:
+      tpu_config: the TPUConfig that specifies TPU-specific configuration.
+      evaluation_master: a string. The address of the master to use for eval.
+        Defaults to master if not set.
+      master: a string. The address of the master to use for training.
+      cluster: a ClusterResolver
+      **kwargs: keyword config parameters.
+
+    Raises:
+      ValueError: if cluster is not None and the provided session_config has a
+        cluster_def already.
+    """
+    super(RunConfig, self).__init__(**kwargs)
+    self._tpu_config = tpu_config or TPUConfig()
+    self._cluster = cluster
+
+    # If user sets master and/or evaluation_master explicitly, including empty
+    # string '', take it. Otherwise, take the values set by parent class.
+    if master is not None:
+      if cluster is not None:
+        raise ValueError('Both master and cluster are set.')
+      self._master = master
+    else:
+      if cluster:
+        self._master = cluster.master()
+
+    if evaluation_master is not None:
+      self._evaluation_master = evaluation_master
+    elif (not self._evaluation_master and
+          self.task_type != run_config_lib.TaskType.EVALUATOR):
+      # If the task type is EVALUATOR, it means some cluster manager sets the
+      # TF_CONFIG. In that case, we respect the configuration in TF_CONFIG.
+      #
+      # Otherwise, it means user executes the code without external cluster
+      # manager. For that, we optimize the user experience by setting
+      # evaluation_master to master, unless user overwrites it.
+      self._evaluation_master = self._master
+
+    # Set the ClusterSpec to use
+    if cluster:
+      self._cluster_spec = cluster.cluster_spec()
+
+      # Merge the cluster_def into the ConfigProto.
+      if self._session_config is None:  # pylint: disable=access-member-before-definition
+        self._session_config = config_pb2.ConfigProto(
+            allow_soft_placement=True, isolate_session_state=True)
+      if self._session_config.HasField('cluster_def'):
+        raise ValueError('You cannot provide a ClusterResolver and '
+                         'session_config.cluster_def.')
+      if self._cluster_spec:
+        self._session_config.cluster_def.CopyFrom(
+            self._cluster_spec.as_cluster_def())
+
+  def _maybe_overwrite_session_config_for_distributed_training(self):
+    # Overrides the parent class's session_config overwrite for between-graph
+    # replication. TPU runs with in-graph replication, which should not have a
+    # device filter. Doing nothing ("pass") effectively disables it.
+    pass
+
+  @property
+  def evaluation_master(self):
+    return self._evaluation_master
+
+  @property
+  def master(self):
+    return self._master
+
+  @property
+  def tpu_config(self):
+    return self._tpu_config
+
+  @property
+  def cluster(self):
+    return self._cluster
+
+  def replace(self, **kwargs):
+    if 'tpu_config' not in kwargs:
+      return super(RunConfig, self).replace(**kwargs)
+
+    tpu_config = kwargs.pop('tpu_config')
+    new_instance = super(RunConfig, self).replace(**kwargs)
+    new_instance._tpu_config = tpu_config  # pylint: disable=protected-access
+    return new_instance
+
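+# Hedged usage sketch for RunConfig.replace above (illustrative only):
+#   config = RunConfig(tpu_config=TPUConfig(iterations_per_loop=100))
+#   config_2 = config.replace(tpu_config=TPUConfig(iterations_per_loop=200))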
+
+def _get_tpu_job_name_from_tf_config():
+  """Extracts the TPU job name from TF_CONFIG env variable."""
+  # TODO(xiejw): Extends this to support both TF_CONFIG env variable and cluster
+  # spec propagation.
+  tf_config = json.loads(os.environ.get(_TF_CONFIG_ENV, '{}'))
+  tpu_job_name = tf_config.get(_SERVICE_KEY, {}).get(_TPU_WORKER_JOB_NAME)
+  if tpu_job_name:
+    tf.compat.v1.logging.info('Load TPU job name from TF_CONFIG: %s',
+                              tpu_job_name)
+  return tpu_job_name
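+
+# Hedged illustration for _get_tpu_job_name_from_tf_config (assuming
+# run_config_lib._SERVICE_KEY is 'service'): a TF_CONFIG such as
+#   '{"service": {"tpu_worker_job_name": "my_tpu_job"}}'
+# would make this function return 'my_tpu_job'.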
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_context.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_context.py
new file mode 100644
index 00000000..316674c7
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_context.py
@@ -0,0 +1,911 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ===================================================================
+"""TPU system metadata and associated tooling."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from contextlib import contextmanager
+import copy
+import tensorflow as tf
+from tensorflow.python.distribute import distribution_strategy_context
+from tensorflow.python.ops import summary_ops_v2
+from tensorflow.python.tpu import device_assignment as tpu_device_assignment
+from tensorflow.python.tpu import tpu_system_metadata as tpu_system_metadata_lib
+from tensorflow_estimator.python.estimator import model_fn as model_fn_lib
+from tensorflow_estimator.python.estimator.tpu import _tpu_estimator_embedding
+from tensorflow_estimator.python.estimator.tpu import tpu_config
+
+_DEFAULT_JOB_NAME = 'tpu_worker'
+_DEFAULT_COORDINATOR_JOB_NAME = 'coordinator'
+_LOCAL_MASTERS = ('', 'local')
+# TODO(pgavin): support PF 3D mesh
+_NUM_CORES_TO_COMPUTATION_SHAPE = {
+    1: [1, 1, 1, 1],
+    2: [1, 1, 1, 2],
+    4: [1, 2, 1, 2],
+    8: [2, 2, 1, 2],
+    16: [4, 2, 1, 2],
+    32: [4, 4, 1, 2],
+    64: [8, 4, 1, 2],
+    128: [8, 8, 1, 2],
+}
+
+
+class TPUContext(object):
+  """A context that holds the current configuration of the TPU computation.
+
+  TPUContext was designed for getting TPU context information when calling
+  input_fn. It can be called in model_fn as well.
+
+  Users are not expected to construct an instance via the constructor. The
+  only legitimate way to get an instance is either in `input_fn`:
+
+  ```
+  def input_fn(params):
+    batch_size = params['batch_size']
+    context = params['context']
+    # ...
+  ```
+
+  or in `model_fn`
+
+  ```
+  def model_fn(params):
+    batch_size = params['batch_size']
+    context = params['context']
+    # ...
+  ```
+
+  Most of the fields of TPUContext are useful for both `input_fn` and
+  `model_fn`. Exceptions are:
+
+  1. `input_fn` only:
+
+    current_input_fn_deployment
+    current_host
+
+  2. `model_fn` only:
+
+    device_assignment
+
+  """
+
+  def __init__(self,
+               internal_ctx,
+               input_device=None,
+               invocation_index=None,
+               call_from_input_fn=True,
+               host_id=None):
+    self._internal_ctx = internal_ctx
+    self._input_device = input_device
+    self._invocation_index = invocation_index
+    self._call_from_input_fn = call_from_input_fn
+    self._host_id = host_id
+
+  def current_input_fn_deployment(self):
+    """The configuration of the current input_fn invocation.
+
+    The configuration depends on `TPUConfig.per_host_input_for_training`. See
+    `TPUConfig` for details.
+
+    Only set in the params dict of `input_fn`.
+
+    Returns:
+      A tuple of
+        1. Device spec string: String, the current CPU host where the
+           input_fn is invoked.
+        2. Current invocation index: Int, 0-based index of the input_fn
+           invocation. See next item for details.
+        3. Total invocation count: Int, the total number of times to invoke the
+           input_fn on all CPU hosts. Each invocation will be passed with a new
+           `TPUContext` instance with current invocation index set properly.
+        4. Total number of replicas consumed by current_invocation: Int, the
+           number of replicas fed by the data returned by current input_fn. For
+           example, for per_core input pipeline deployment
+           and non-model-parallelism, total invocation count is equal to
+           the number of cores in the system and num replicas consumed by
+           current invocation is 1. For per-host v2 input pipeline deployment,
+           total invocation count is equal to the number of hosts in the system
+           and num replicas consumed by current invocation is equal to number of
+           replicas per host.
+
+    Raises:
+      RuntimeError: If this method is not called from input_fn.
+    """
+    if not self._call_from_input_fn:
+      raise RuntimeError('This TPUContext instance must not be called from'
+                         ' model_fn.')
+
+    if self._internal_ctx.is_input_sharded_per_core():
+      total_invocation_count = (
+          self._internal_ctx.num_hosts *
+          self._internal_ctx.num_of_replicas_per_host)
+      replicas_consumed = 1
+    elif self._internal_ctx.is_input_broadcast_with_iterators():
+      total_invocation_count = 1
+      replicas_consumed = self._internal_ctx.num_replicas
+    elif self._internal_ctx.is_replica_across_hosts():
+      total_invocation_count = self._internal_ctx.num_replicas
+      replicas_consumed = 1
+    else:
+      total_invocation_count = self._internal_ctx.num_hosts
+      replicas_consumed = self._internal_ctx.num_of_replicas_per_host
+    return (self._input_device, self._invocation_index, total_invocation_count,
+            replicas_consumed)
+
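+  # Hedged illustration for current_input_fn_deployment above (values
+  # assumed): with 2 hosts of 8 cores each and per-host v2 input, the
+  # returned tuple could be ('/job:tpu_worker/task:0/device:CPU:0', 0, 2, 8).
+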
+  @property
+  def num_replicas(self):
+    """The total number of replicas.
+
+    For non-model-parallelism, num_replicas should be the total num of TPU
+    cores in the system.
+
+    Returns:
+      The number of replicas.
+    """
+    return self._internal_ctx.num_replicas
+
+  @property
+  def num_hosts(self):
+    """The number of hosts for the TPU system."""
+    return self._internal_ctx.num_hosts
+
+  @property
+  def current_host(self):
+    """The current host index for the TPU system.
+
+    Returns:
+      The host index (int).
+
+    Raises:
+      RuntimeError: If this method is not called from input_fn.
+    """
+
+    if not self._call_from_input_fn:
+      raise RuntimeError('This TPUContext instance must not be called from'
+                         ' model_fn.')
+
+    return self._host_id
+
+  @property
+  def num_of_replicas_per_host(self):
+    """The number of replicas for each host."""
+    if self._internal_ctx.model_parallelism_enabled:
+      raise ValueError(
+          'num_of_replicas_per_host is not supported for model_parallelism')
+    return self._internal_ctx.num_of_replicas_per_host
+
+  @property
+  def device_assignment(self):
+    """Returns device_assignment object.
+
+    Raises:
+      RuntimeError: If this method is not called from model_fn.
+    """
+    if self._call_from_input_fn:
+      raise RuntimeError('This TPUContext instance must not be called from'
+                         ' input_fn.')
+    return self._internal_ctx.device_assignment
+
+  def device_for_replica(self, replica_id):
+    """Returns the tuple of (CPU device and device ordinal) for replica.
+
+    This should be used for full replication in the non-model-parallelism case.
+
+    Args:
+       replica_id: Int, the replica index.
+
+    Returns:
+       A tuple of device spec for CPU device and int device ordinal.
+    """
+    # Note: for non-model parallelism, the mapping could be a random
+    # permutation. The order should not matter in most cases, as long as the
+    # model is replicated to all cores in the system.
+    return self._internal_ctx.device_for_replica(replica_id)
+
+  @property
+  def tpu_host_placement_function(self):
+    """Returns the TPU host place function.
+
+    The place function takes host_id as the input and returns the TF device
+    for the corresponding host.
+    """
+
+    def _placement_function(host_id):
+      """Return the host device given host_id."""
+      return self._internal_ctx.tpu_host_placement_function(host_id=host_id)
+
+    return _placement_function
+
+
+class _InternalTPUContext(object):
+  """A context holds immutable states of TPU computation.
+
+  This immutable object holds TPUEstimator config, train/eval batch size, and
+  `TPUEstimator.use_tpu`, which is expected to be passed around. It also
+  provides utility functions, based on the current state, to determine other
+  information commonly required by TPU computation, such as TPU device names,
+  TPU hosts, shard batch size, etc.
+
+  If eval_on_tpu is False, then execution of eval on TPU is disabled.
+  If eval_on_tpu is True but use_tpu is False, a warning is issued,
+  and TPU execution is disabled for all modes.
+
+  N.B. As `mode` is not immutable state in Estimator, but essential to
+  distinguish between TPU training and evaluation, a common usage for
+  _InternalTPUContext with `mode` is as follows:
+  ```
+  with _ctx.with_mode(mode) as ctx:
+    if ctx.is_running_on_cpu():
+       ...
+  ```
+  """
+
+  def __init__(self,
+               config,
+               train_batch_size,
+               eval_batch_size,
+               predict_batch_size,
+               use_tpu,
+               eval_on_tpu=True,
+               embedding_config_spec=None):
+    self._config = config
+    self._train_batch_size = train_batch_size
+    self._eval_batch_size = eval_batch_size
+    self._predict_batch_size = predict_batch_size
+    self._use_tpu = use_tpu
+    tf.compat.v1.logging.info('_TPUContext: eval_on_tpu %s', eval_on_tpu)
+    if not use_tpu and eval_on_tpu:
+      tf.compat.v1.logging.warn('eval_on_tpu ignored because use_tpu is False.')
+
+    self._eval_on_tpu = eval_on_tpu
+    self._model_parallelism_enabled = (
+        use_tpu and config.tpu_config.num_cores_per_replica)
+    self._mode = None
+    num_cores_per_replica = config.tpu_config.num_cores_per_replica
+    if self._model_parallelism_enabled:
+      self._computation_shape = _NUM_CORES_TO_COMPUTATION_SHAPE[
+          num_cores_per_replica]
+    else:
+      self._computation_shape = None
+    self._lazy_tpu_system_metadata_dict = {}  # key by master address
+    self._lazy_device_assignment_dict = {}  # key by master address
+    self._lazy_validation_dict = {}  # key by ModeKeys
+    self._embedding_config_spec = embedding_config_spec
+    self._lazy_embedding_config_dict = {}  # key by master address
+
+  def _assert_mode(self):
+    if self._mode is None:
+      raise RuntimeError(
+          '`mode` needs to be set via contextmanager `with_mode`.')
+    return self._mode
+
+  @contextmanager
+  def with_mode(self, mode):
+    # NOTE(xiejw): Shallow copy is enough. It will share the lazy dictionaries,
+    # such as _lazy_tpu_system_metadata_dict, between the new copy and the
+    # original one. Note that all lazy states stored in _lazy_foo properties
+    # are effectively immutable, as they should stay the same for the process
+    # lifetime.
+    new_ctx = copy.copy(self)
+    new_ctx._mode = mode  # pylint: disable=protected-access
+    yield new_ctx
+
+  @property
+  def mode(self):
+    return self._assert_mode()
+
+  def _get_master_address(self):
+    mode = self._assert_mode()
+    config = self._config
+    master = (
+        config.master
+        if mode != model_fn_lib.ModeKeys.EVAL else config.evaluation_master)
+    return master
+
+  def _get_tpu_system_metadata(self):
+    """Gets the (maybe cached) TPU system metadata."""
+    master = self._get_master_address()
+    tpu_system_metadata = self._lazy_tpu_system_metadata_dict.get(master)
+    if tpu_system_metadata is not None:
+      return tpu_system_metadata
+
+    cluster_def = None
+    if (self._config.session_config and
+        self._config.session_config.cluster_def.job):
+      cluster_def = self._config.session_config.cluster_def
+
+    # pylint: disable=protected-access
+    tpu_system_metadata = (
+        tpu_system_metadata_lib._query_tpu_system_metadata(
+            master,
+            cluster_def=cluster_def,
+            query_topology=self.model_parallelism_enabled))
+
+    self._lazy_tpu_system_metadata_dict[master] = tpu_system_metadata
+    return tpu_system_metadata
+
+  def _get_device_assignment(self):
+    """Gets the (maybe cached) TPU device assignment."""
+    master = self._get_master_address()
+    device_assignment = self._lazy_device_assignment_dict.get(master)
+    if device_assignment is not None:
+      return device_assignment
+
+    tpu_system_metadata = self._get_tpu_system_metadata()
+
+    device_assignment = tpu_device_assignment.device_assignment(
+        tpu_system_metadata.topology,
+        computation_shape=self._computation_shape,
+        num_replicas=self.num_replicas)
+
+    tf.compat.v1.logging.info(
+        'num_cores_per_replica: %s',
+        str(self._config.tpu_config.num_cores_per_replica))
+    tf.compat.v1.logging.info('computation_shape: %s',
+                              str(self._computation_shape))
+    tf.compat.v1.logging.info('num_replicas: %d', self.num_replicas)
+    tf.compat.v1.logging.info(
+        'device_assignment.topology.device_coordinates: %s',
+        str(device_assignment.topology.device_coordinates))
+    tf.compat.v1.logging.info('device_assignment.core_assignment: %s',
+                              str(device_assignment.core_assignment))
+
+    self._lazy_device_assignment_dict[master] = device_assignment
+    return device_assignment
+
+  @property
+  def tensor_core_embedding_columns(self):
+    if self._embedding_config_spec:
+      return self._embedding_config_spec.tensor_core_feature_columns
+    return None
+
+  @property
+  def embedding_config(self):
+    """Returns the embedding config based on current mode."""
+    master = self._get_master_address()
+    if master in self._lazy_embedding_config_dict:
+      embedding_config = self._lazy_embedding_config_dict[master]
+    else:
+      embedding_config = None
+      if self._use_tpu and self._embedding_config_spec:
+        embedding_config = _tpu_estimator_embedding.EmbeddingConfig(
+            self._embedding_config_spec, self._train_batch_size,
+            self._eval_batch_size, self.num_hosts, self.num_cores, self.config)
+        if not embedding_config.has_embedding_tables():
+          embedding_config = None
+      self._lazy_embedding_config_dict[master] = embedding_config
+
+    if embedding_config is not None:
+      mode = self._assert_mode()
+      # Dynamically attach tpu_embedding based on mode. With this, we can keep
+      # embedding_config immutable while call sites always access the unified
+      # API '.tpu_embedding'.
+      embedding_config.tpu_embedding = embedding_config.get_tpu_embedding(mode)
+    return embedding_config
+
+  @property
+  def allow_per_host_v2_parallel_get_next(self):
+    return (self._config.tpu_config
+            .experimental_allow_per_host_v2_parallel_get_next)
+
+  @property
+  def feed_hook(self):
+    return (self._config.tpu_config.experimental_feed_hook)
+
+  @property
+  def model_parallelism_enabled(self):
+    return self._model_parallelism_enabled
+
+  @property
+  def input_partition_dims(self):
+    return self._config.tpu_config.input_partition_dims
+
+  @property
+  def device_assignment(self):
+    return (self._get_device_assignment()
+            if self._model_parallelism_enabled else None)
+
+  @property
+  def num_of_cores_per_host(self):
+    metadata = self._get_tpu_system_metadata()
+    return metadata.num_of_cores_per_host
+
+  @property
+  def num_cores(self):
+    metadata = self._get_tpu_system_metadata()
+    return metadata.num_cores
+
+  @property
+  def num_of_replicas_per_host(self):
+    """Return the number of replicas per host."""
+    if self.model_parallelism_enabled:
+      # There can be fewer replicas. This might return 0!
+      return self.num_replicas // self.num_hosts
+    else:
+      return self.num_of_cores_per_host
+
+  @property
+  def num_replicas(self):
+    """Compute the total number of replicas."""
+    num_cores_in_system = self.num_cores
+
+    if self.model_parallelism_enabled:
+      num_cores_per_replica = self._config.tpu_config.num_cores_per_replica
+      if num_cores_per_replica > num_cores_in_system:
+        raise ValueError(
+            'The num of cores required by the model parallelism, specified by '
+            'TPUConfig.num_cores_per_replica, is larger than the total num of '
+            'TPU cores in the system. num_cores_per_replica: {}, num cores '
+            'in the system: {}'.format(num_cores_per_replica,
+                                       num_cores_in_system))
+
+      if num_cores_in_system % num_cores_per_replica != 0:
+        raise RuntimeError(
+            'The num of cores in the system ({}) is not divisible by the num '
+            'of cores ({}) required by the model parallelism, specified by '
+            'TPUConfig.num_cores_per_replica. This should never happen!'.format(
+                num_cores_in_system, num_cores_per_replica))
+
+      return num_cores_in_system // num_cores_per_replica
+    else:
+      return num_cores_in_system
+
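+  # Hedged worked example for num_replicas above (values assumed): a 32-core
+  # system with num_cores_per_replica=8 yields 32 // 8 = 4 replicas.
+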
+  @property
+  def num_hosts(self):
+    metadata = self._get_tpu_system_metadata()
+    return metadata.num_hosts
+
+  @property
+  def config(self):
+    return self._config
+
+  def is_input_sharded_per_core(self):
+    """Return true if input_fn is invoked per-core (other than per-host)."""
+    mode = self._assert_mode()
+    return (mode == model_fn_lib.ModeKeys.TRAIN and
+            (self._config.tpu_config.per_host_input_for_training is
+             tpu_config.InputPipelineConfig.PER_SHARD_V1))
+
+  def is_input_per_host_with_iterators(self):
+    """Return true if input_fn should be run in the per-host v2 config."""
+    return (self._config.tpu_config.per_host_input_for_training is
+            tpu_config.InputPipelineConfig.PER_HOST_V2)
+
+  def is_input_broadcast_with_iterators(self):
+    """Return true if input_fn should be run in the full_replicae config."""
+    return ((self._config.tpu_config.per_host_input_for_training is
+             tpu_config.InputPipelineConfig.BROADCAST) or
+            (self.is_input_slice_broadcast_to_all_cores()))
+
+  def is_input_slice_broadcast_to_all_cores(self):
+    """Return true if input_fn is invoked once and broadcast to other hosts."""
+    mode = self._assert_mode()
+    return (mode != model_fn_lib.ModeKeys.TRAIN and
+            self._config.tpu_config.eval_training_input_configuration is
+            tpu_config.InputPipelineConfig.SLICED)
+
+  def is_replica_across_hosts(self):
+    """Return true if single replica is across multiple hosts."""
+    # For example, when num_cores_per_replica > num_cores_per_host.
+    num_cores_per_replica = self._config.tpu_config.num_cores_per_replica
+    num_cores_per_host = self._get_tpu_system_metadata().num_of_cores_per_host
+    return (num_cores_per_replica is not None and
+            num_cores_per_replica > num_cores_per_host)
+
+  def is_running_on_cpu(self, is_export_mode=False):
+    """Determines whether the input_fn and model_fn should be invoked on CPU.
+
+    This API also validates the user-provided configuration, such as batch
+    size, against the lazily initialized TPU system metadata.
+
+    Args:
+      is_export_mode: Indicates whether the current mode is for exporting the
+        model, when mode == PREDICT. Only with this bool can we tell whether
+        the user is calling Estimator.predict or Estimator.export_savedmodel,
+        which run on TPU and CPU respectively. The parent class Estimator does
+        not distinguish these two.
+
+    Returns:
+      bool, whether current input_fn or model_fn should be running on CPU.
+
+    Raises:
+      ValueError: any configuration is invalid.
+    """
+
+    is_running_on_cpu = self._is_running_on_cpu(is_export_mode)
+    if not is_running_on_cpu:
+      self._validate_tpu_configuration()
+    return is_running_on_cpu
+
+  def _is_running_on_cpu(self, is_export_mode):
+    """Determines whether the input_fn and model_fn should be invoked on CPU."""
+    mode = self._assert_mode()
+
+    if not self._use_tpu:
+      return True
+
+    if mode == model_fn_lib.ModeKeys.EVAL and not self._eval_on_tpu:
+      tf.compat.v1.logging.info('_is_running_on_cpu: eval_on_tpu disabled')
+      return True
+
+    if is_export_mode:
+      return True
+
+    return False
+
+  @property
+  def global_batch_size(self):
+    mode = self._assert_mode()
+    if mode == model_fn_lib.ModeKeys.TRAIN:
+      return self._train_batch_size
+    elif mode == model_fn_lib.ModeKeys.EVAL:
+      return self._eval_batch_size
+    elif mode == model_fn_lib.ModeKeys.PREDICT:
+      return self._predict_batch_size
+    else:
+      return None
+
+  @property
+  def batch_size_for_input_fn(self):
+    """Returns the shard batch size for `input_fn`."""
+    global_batch_size = self.global_batch_size
+    if (self.is_running_on_cpu() or self.is_input_broadcast_with_iterators()):
+      return global_batch_size
+
+    # On TPU
+    if self.is_input_sharded_per_core() or (
+        self.is_input_per_host_with_iterators()) or (
+            self.is_replica_across_hosts()):
+      return global_batch_size // self.num_replicas
+    else:
+      return global_batch_size // self.num_hosts
+
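+  # Sizing sketch (illustrative values, not from the original code): with
+  # global_batch_size=1024 and num_replicas=8, the per-core, PER_HOST_V2 and
+  # cross-host-replica paths above yield 1024 // 8 = 128 per call; the
+  # per-host v1 path yields 1024 // num_hosts; on CPU or in BROADCAST mode
+  # the full 1024 is returned.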
+  @property
+  def batch_size_for_model_fn(self):
+    """Returns the shard batch size for `model_fn`."""
+    global_batch_size = self.global_batch_size
+
+    if (self.is_running_on_cpu() or
+        (self.is_input_broadcast_with_iterators() and
+         not self.is_input_slice_broadcast_to_all_cores())):
+      return global_batch_size
+
+    # On TPU, the batch is always sharded per replica.
+    return global_batch_size // self.num_replicas
+
+  @property
+  def master_job(self):
+    """Returns the job name to use to place TPU computations on.
+
+    Returns:
+      A string containing the job name, or None if no job should be specified.
+
+    Raises:
+      ValueError: If the user needs to specify a tpu_job_name, because we are
+        unable to infer the job name automatically, or if the user-specified job
+        names are inappropriate.
+    """
+    run_config = self._config
+    # If the user specifies the tpu_job_name, use that.
+    if run_config.tpu_config.tpu_job_name:
+      return run_config.tpu_config.tpu_job_name
+
+    # The tpu job is determined by the run_config. Right now, this method is
+    # required as tpu_config is not part of the RunConfig.
+    mode = self._assert_mode()
+    master = (
+        run_config.evaluation_master
+        if mode == model_fn_lib.ModeKeys.EVAL else run_config.master)
+    cluster_def = (
+        run_config.session_config.cluster_def
+        if run_config.session_config else None)
+
+    try:
+      master_job = tpu_system_metadata_lib.master_job(master, cluster_def)
+    except ValueError as e:
+      raise ValueError(
+          str(e) + ' Please specify a tpu_job_name as part of '
+          'your TPUConfig.')
+    return master_job
+
+  @property
+  def tpu_host_placement_function(self):
+    """Returns the TPU host place function."""
+
+    master = self.master_job
+
+    def _placement_function(_sentinal=None, replica_id=None, host_id=None):  # pylint: disable=invalid-name
+      """Return the host device given replica_id or host_id."""
+      assert _sentinal is None
+      if replica_id is not None and host_id is not None:
+        raise RuntimeError(
+            'At most one of replica_id and host_id can be non-None.')
+
+      if master is None:
+        return '/replica:0/task:0/device:CPU:0'
+      else:
+        if replica_id is not None:
+          if self.model_parallelism_enabled:
+            return self.device_assignment.host_device(
+                replica=replica_id, job=master)
+          else:
+            host_id = replica_id // self.num_of_cores_per_host
+
+        return '/job:%s/task:%d/device:CPU:0' % (master, host_id)
+
+    return _placement_function
+
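+  # Illustrative example (assumed values, not from the original code): with
+  # master job 'tpu_worker' and 8 cores per host, the host placement function
+  # above maps replica_id=9 to host task 9 // 8 = 1, i.e.
+  # '/job:tpu_worker/task:1/device:CPU:0'.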
+  @property
+  def tpu_device_placement_function(self):
+    """Returns a TPU device placement Fn."""
+    master = self.master_job
+    job_device = '' if master is None else ('/job:%s' % master)
+
+    def _placement_function(i):
+      if self.model_parallelism_enabled:
+        return self.device_assignment.tpu_device(replica=i, job=master)
+      else:
+        num_of_cores_per_host = self.num_of_cores_per_host
+        host_id = i // num_of_cores_per_host
+        ordinal_id = i % num_of_cores_per_host
+        return '%s/task:%d/device:TPU:%d' % (job_device, host_id, ordinal_id)
+
+    return _placement_function
+
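+  # Similarly (illustrative), the device placement function above maps core
+  # i=9 with 8 cores per host to task 9 // 8 = 1 and ordinal 9 % 8 = 1,
+  # i.e. '/job:tpu_worker/task:1/device:TPU:1'.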
+  def tpu_ordinal_function(self, host_id):
+    """Returns the TPU ordinal fn."""
+
+    def _tpu_ordinal_function(shard_index_in_host):
+      """Return the TPU ordinal associated with a shard.
+
+      Required because the enqueue ops are placed on CPU.
+
+      Args:
+        shard_index_in_host: the shard index
+
+      Returns:
+        The ordinal of the TPU device the shard's infeed should be placed on.
+      """
+      if self.model_parallelism_enabled:
+        # We put both enqueue/dequeue ops at tpu.core(0) in each replica.
+        replica = self.device_assignment.lookup_replicas(host_id,
+                                                         0)[shard_index_in_host]
+        return self.device_assignment.tpu_ordinal(replica=replica)
+      else:
+        return shard_index_in_host % self.num_of_cores_per_host
+
+    return _tpu_ordinal_function
+
+  def _validate_tpu_configuration(self):
+    """Validates the configuration based on the TPU system metadata."""
+    mode = self._assert_mode()
+    if self._lazy_validation_dict.get(mode):
+      return
+
+    # All following information is obtained from TPU system metadata.
+    num_cores = self.num_cores
+    num_replicas = self.num_replicas
+    num_hosts = self.num_hosts
+
+    if not num_cores:
+      tpu_system_metadata = self._get_tpu_system_metadata()
+      raise RuntimeError(
+          'Cannot find any TPU cores in the system. Please double check '
+          'Tensorflow master address and TPU worker(s). Available devices '
+          'are {}.'.format(tpu_system_metadata.devices))
+
+    if self._config.tpu_config.num_shards:
+      user_provided_num_replicas = self._config.tpu_config.num_shards
+      if user_provided_num_replicas != num_replicas:
+        message = (
+            'TPUConfig.num_shards is not set correctly. According to TPU '
+            'system metadata for Tensorflow master ({}): num_replicas should '
+            'be ({}), got ({}). For non-model-parallelism, num_replicas should '
+            'be the total num of TPU cores in the system. For '
+            'model-parallelism, the total number of TPU cores should be '
+            'num_cores_per_replica * num_replicas. Please set it '
+            'accordingly or leave it as `None`'.format(
+                self._get_master_address(), num_replicas,
+                user_provided_num_replicas))
+
+        raise ValueError(message)
+
+    if self._config.tpu_config.num_cores_per_replica and (
+        not self.is_input_per_host_with_iterators()):
+      num_cores_per_replica = self._config.tpu_config.num_cores_per_replica
+      num_cores_per_host = self._get_tpu_system_metadata().num_of_cores_per_host
+      if num_cores_per_replica > num_cores_per_host:
+        raise ValueError(
+            'Except in PER_HOST_V2 mode, the number of cores required by '
+            'model parallelism (TPUConfig.num_cores_per_replica) must be '
+            'less than or equal to num_cores_per_host. '
+            'num_cores_per_replica: {}, num_cores_per_host: {}'.format(
+                num_cores_per_replica, num_cores_per_host))
+
+    if mode == model_fn_lib.ModeKeys.TRAIN:
+      if (self._train_batch_size % num_replicas != 0 and
+          not self.is_input_broadcast_with_iterators()):
+        raise ValueError(
+            'train batch size {} must be divisible by number of replicas {}'
+            .format(self._train_batch_size, num_replicas))
+
+    elif mode == model_fn_lib.ModeKeys.EVAL:
+      if self._eval_batch_size is None:
+        raise ValueError(
+            'eval_batch_size in TPUEstimator constructor cannot be `None` '
+            'if .evaluate is running on TPU.')
+      if (self._eval_batch_size % num_replicas != 0 and
+          not self.is_input_broadcast_with_iterators()):
+        raise ValueError(
+            'eval batch size {} must be divisible by number of replicas {}'
+            .format(self._eval_batch_size, num_replicas))
+      if (num_hosts != 1 and
+          not self.is_input_broadcast_with_iterators() and
+          not self.is_input_per_host_with_iterators()):
+        raise ValueError(
+            'TPUEstimator.evaluate is only supported under three conditions: '
+            '1. num_hosts=1; 2. BROADCAST mode; '
+            '3. PER_HOST_V2 mode. '
+            'mode: {}; num_hosts: {}; num_replicas: {}'.format(
+                self._config.tpu_config.per_host_input_for_training, num_hosts,
+                num_replicas))
+      if num_hosts > 1 and self.is_input_per_host_with_iterators():
+        tf.compat.v1.logging.warn('Running TPUEstimator.evaluate for input mode'
+                                  ' PER_HOST_V2 and num_hosts %d', num_hosts)
+    else:
+      assert mode == model_fn_lib.ModeKeys.PREDICT
+      if self._predict_batch_size is None:
+        raise ValueError(
+            'predict_batch_size in TPUEstimator constructor cannot be `None` '
+            'if .predict is running on TPU.')
+      if (self._predict_batch_size % num_replicas != 0 and
+          not self.is_input_broadcast_with_iterators()):
+        raise ValueError(
+            'predict batch size {} must be divisible by number of replicas {}'
+            .format(self._predict_batch_size, num_replicas))
+      if num_hosts != 1 and not (
+          self.is_input_broadcast_with_iterators()) and not (
+              num_replicas == 1 and self.is_input_per_host_with_iterators()):
+        raise ValueError(
+            'TPUEstimator.predict is only supported under three conditions: '
+            '1. num_hosts=1; 2. BROADCAST mode; '
+            '3. PER_HOST_V2 mode with num_replicas=1. '
+            'mode: {}; num_hosts: {}; num_replicas: {}'.format(
+                self._config.tpu_config.per_host_input_for_training, num_hosts,
+                num_replicas))
+
+    # Record the state "validated" into lazy dictionary.
+    self._lazy_validation_dict[mode] = True
+
+  def device_for_replica(self, replica_id):
+    """Returns the tuple of (CPU device and device ordinal) for replica.
+
+    This should be used for full replication in the non-model-parallelism case.
+
+    Args:
+       replica_id: Int, the replica index.
+
+    Returns:
+       A tuple of device spec for CPU device and int device ordinal.
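+
+    For example (illustrative, non-model-parallelism): with 4 replicas per
+    host and master job 'tpu_worker', replica_id=5 maps to
+    ('/job:tpu_worker/task:1/device:CPU:0', 1).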
+    """
+    master = self.master_job
+
+    if self.model_parallelism_enabled:
+      return (self.device_assignment.host_device(
+          replica=replica_id,
+          job=master), self.device_assignment.tpu_ordinal(replica=replica_id))
+
+    job_device = '' if master is None else ('/job:%s' % master)
+
+    num_of_replicas_per_host = self.num_of_replicas_per_host
+    assert num_of_replicas_per_host > 0, (
+        'Got num_of_replicas_per_host: {}'.format(num_of_replicas_per_host))
+    host_id = replica_id // num_of_replicas_per_host
+    ordinal_id = replica_id % num_of_replicas_per_host
+
+    host_device = '%s/task:%d/device:CPU:0' % (job_device, host_id)
+    return (host_device, ordinal_id)
+
+
+class _OneCoreTPUContext(_InternalTPUContext):
+  """Special _InternalTPUContext for one core usage."""
+
+  def __init__(self, config, train_batch_size, eval_batch_size,
+               predict_batch_size, use_tpu):
+
+    super(_OneCoreTPUContext,
+          self).__init__(config, train_batch_size, eval_batch_size,
+                         predict_batch_size, use_tpu)
+
+  def _get_tpu_system_metadata(self):
+    """Gets the (maybe cached) TPU system metadata."""
+    master = self._get_master_address()
+    tpu_system_metadata = self._lazy_tpu_system_metadata_dict.get(master)
+    if tpu_system_metadata is not None:
+      return tpu_system_metadata
+
+    tpu_system_metadata = (
+        tpu_system_metadata_lib.TPUSystemMetadata(  # pylint: disable=protected-access
+            num_cores=1,
+            num_hosts=1,
+            num_of_cores_per_host=1,
+            topology=None,
+            devices=[]))
+
+    self._lazy_tpu_system_metadata_dict[master] = tpu_system_metadata
+    return tpu_system_metadata
+
+
+class _TPUEstimatorReplicaContext(tf.distribute.ReplicaContext):
+  """Internal context for storing replica id.
+
+  This is used to set eager.context.Context() so that only summary ops from
+  the 0th replica are executed.
+  """
+
+  def __init__(self, replica_id_in_sync):
+    """Creates internal replica context for TPUEstimator.
+
+    Args:
+      replica_id_in_sync: Zero-indexed integer id of the replica that is
+        running the TPU computation.
+    """
+    super(_TPUEstimatorReplicaContext, self).__init__(None, replica_id_in_sync)
+    # Use default strategy and replica context when variables are
+    # accessed/watched for backpropagation.
+    # pylint: disable=protected-access
+    self._thread_context = distribution_strategy_context._DefaultReplicaThreadMode(
+    )
+    self._strategy = self._thread_context.strategy
+    # pylint: enable=protected-access
+
+  def __enter__(self):
+
+    def replica_id_is_zero():
+      return tf.math.equal(self.replica_id_in_sync_group, tf.constant(0))
+
+    if hasattr(summary_ops_v2, '_summary_state'):
+      summary_state = summary_ops_v2._summary_state  # pylint: disable=protected-access
+      self._summary_recording_distribution_strategy = (
+          summary_state.is_recording_distribution_strategy)
+      summary_state.is_recording_distribution_strategy = replica_id_is_zero
+
+  def __exit__(self, exception_type, exception_value, traceback):
+    if hasattr(summary_ops_v2, '_summary_state'):
+      summary_state = summary_ops_v2._summary_state  # pylint: disable=protected-access
+      summary_state.is_recording_distribution_strategy = (
+          self._summary_recording_distribution_strategy)
+
+
+def _get_tpu_context(config, train_batch_size, eval_batch_size,
+                     predict_batch_size, use_tpu, eval_on_tpu,
+                     embedding_config_spec):
+  """Returns an instance of `_InternalTPUContext`."""
+
+  if (config.tpu_config.num_shards == 1 and
+      config.tpu_config.num_cores_per_replica is None):
+    if embedding_config_spec is not None:
+      raise ValueError('Setting TPUConfig.num_shards==1 is unsupported '
+                       'when embedding_config_spec is not None.')
+    tf.compat.v1.logging.warn(
+        'Setting TPUConfig.num_shards==1 is an unsupported behavior. '
+        'Please fix it as soon as possible (leave num_shards as None).')
+    return _OneCoreTPUContext(config, train_batch_size, eval_batch_size,
+                              predict_batch_size, use_tpu)
+
+  return _InternalTPUContext(config, train_batch_size, eval_batch_size,
+                             predict_batch_size, use_tpu, eval_on_tpu,
+                             embedding_config_spec)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_estimator.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_estimator.py
new file mode 100644
index 00000000..12efdfab
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_estimator.py
@@ -0,0 +1,4554 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ===================================================================
+"""TPUEstimator class."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import copy
+import enum
+import math
+import os
+import signal
+import sys
+import threading
+import time
+
+import tensorflow as tf
+import numpy as np
+import six
+from six.moves import queue as Queue  # pylint: disable=redefined-builtin
+from six.moves import xrange  # pylint: disable=redefined-builtin
+
+from tensorflow.core.framework import variable_pb2
+from tensorflow.core.framework.summary_pb2 import Summary
+from tensorflow.core.protobuf import config_pb2
+from tensorflow.core.protobuf.tpu import compilation_result_pb2 as tpu_compilation_result
+from tensorflow.python.client import session as tf_session
+from tensorflow.python.data.ops import dataset_ops
+from tensorflow.python.data.util import nest as data_nest
+from tensorflow.python.distribute.cluster_resolver import tpu_cluster_resolver
+from tensorflow.python.eager import monitoring
+from tensorflow.python.framework import constant_op
+from tensorflow.python.framework import dtypes
+from tensorflow.python.framework import errors
+from tensorflow.python.framework import function
+from tensorflow.python.framework import ops
+from tensorflow.python.ops import array_ops
+from tensorflow.python.ops import batch_ops
+from tensorflow.python.ops import check_ops
+from tensorflow.python.ops import control_flow_ops
+from tensorflow.python.ops import control_flow_util
+from tensorflow.python.ops import init_ops
+from tensorflow.python.ops import math_ops
+from tensorflow.python.ops import resource_variable_ops
+from tensorflow.python.ops import state_ops
+from tensorflow.python.ops import summary_ops_v2 as contrib_summary
+from tensorflow.python.ops import variable_scope
+from tensorflow.python.ops import variables
+from tensorflow.python.platform import tf_logging as logging
+from tensorflow.python.saved_model import tag_constants
+from tensorflow.python.summary import summary
+from tensorflow.python.tpu import functional as tpu_functional
+from tensorflow.python.tpu import preempted_hook
+from tensorflow.python.tpu import session_support
+from tensorflow.python.tpu import tensor_tracer
+from tensorflow.python.tpu import tpu
+from tensorflow.python.tpu import tpu_embedding_gradient
+from tensorflow.python.tpu import tpu_feed
+from tensorflow.python.tpu import tpu_function
+from tensorflow.python.tpu import training_loop
+from tensorflow.python.tpu.ops import tpu_ops
+from tensorflow.python.training import basic_session_run_hooks
+from tensorflow.python.training import evaluation
+from tensorflow.python.training import session_run_hook
+from tensorflow.python.training import training
+from tensorflow.python.training import training_util
+from tensorflow.python.util import function_utils
+from tensorflow.python.util import nest
+from tensorflow.python.util import tf_inspect
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator as estimator_lib
+from tensorflow_estimator.python.estimator import model_fn as model_fn_lib
+from tensorflow_estimator.python.estimator.export import export_output as export_output_lib
+from tensorflow_estimator.python.estimator.tpu import _tpu_estimator_embedding
+from tensorflow_estimator.python.estimator.tpu import error_handling
+from tensorflow_estimator.python.estimator.tpu import iteration_count_estimator
+from tensorflow_estimator.python.estimator.tpu import tpu_config
+from tensorflow_estimator.python.estimator.tpu import tpu_context
+from tensorflow_estimator.python.estimator.tpu import util as util_lib
+from tensorflow_estimator.python.estimator.tpu._tpu_estimator_embedding import AdagradParameters  # pylint: disable=unused-import
+from tensorflow_estimator.python.estimator.tpu._tpu_estimator_embedding import AdamParameters  # pylint: disable=unused-import
+from tensorflow_estimator.python.estimator.tpu._tpu_estimator_embedding import EmbeddingConfigSpec  # pylint: disable=unused-import
+from tensorflow_estimator.python.estimator.tpu._tpu_estimator_embedding import StochasticGradientDescentParameters  # pylint: disable=unused-import
+
+_INITIAL_LOSS = 1e7
+_ZERO_LOSS = 0.
+_TPU_ESTIMATOR = 'tpu_estimator'
+_ITERATIONS_PER_LOOP_VAR = 'iterations_per_loop'
+_BATCH_SIZE_KEY = 'batch_size'
+_CTX_KEY = 'context'
+_USE_TPU_KEY = 'use_tpu'
+_CROSS_REPLICA_SUM_OP = 'CrossReplicaSum'
+_ONE_GIGABYTE = 1024 * 1024 * 1024
+_TPU_ENQUEUE_OPS = '_tpu_enqueue_ops'
+_TPU_TRAIN_OP = '_tpu_train_op'
+_INFERENCE_ON_TPU_MODE = '_inference_on_tpu'
+_KEY_WHEN_PREDICTIONS_IS_A_TENSOR = '_key_when_predictions_is_a_tensor'
+_TENSOR_PACKER_SMALL_FEATURE_DIM_SIZE = 1
+_TENSOR_PACKER_MINIMUM_NUM_SMALL_FEATURES_TO_GROUP = 5
+_TENSOR_PACKER_CONCATENATED_SMALL_FEATURES_KEY = '_concatenated_small_features'
+
+# Ideally _USE_TPU_KEY should be reserved as well. However, there are already
+# models that make use of this key, so it cannot be reserved now without
+# breaking them. In the long run, we would like to mitigate this by migrating
+# models off of using _USE_TPU_KEY.
+_RESERVED_PARAMS_KEYS = [_BATCH_SIZE_KEY, _CTX_KEY]
+
+# TODO(b/65703635): Flip the value and remove all dead code. Currently, this is
+# only used for per-core based deployments. For per-host based pipelines, if a
+# user returns a Dataset instance it will be automatically wrapped in a
+# tf.while_loop (This can be disabled by returning features and labels
+# explicitly).
+_WRAP_INPUT_FN_INTO_WHILE_LOOP = False
+
+# Track the adoption of TPUEstimator
+_tpu_estimator_gauge = monitoring.BoolGauge(
+    '/tensorflow/api/tpu_estimator',
+    'Whether the program uses tpu estimator or not.')
+
+if ops.get_to_proto_function('{}_{}'.format(_TPU_ESTIMATOR,
+                                            _ITERATIONS_PER_LOOP_VAR)) is None:
+  ops.register_proto_function(
+      '{}_{}'.format(_TPU_ESTIMATOR, _ITERATIONS_PER_LOOP_VAR),
+      proto_type=variable_pb2.VariableDef,
+      to_proto=resource_variable_ops._to_proto_fn,  # pylint: disable=protected-access
+      from_proto=resource_variable_ops._from_proto_fn)  # pylint: disable=protected-access
+
+
+def _is_iterable(obj):
+  """A Python 2 and 3 compatible util to check whether `obj` is iterable."""
+  try:
+    iter(obj)
+    return True
+  except TypeError:
+    return False
+
+
+class CatchInvalidHostcallFunctions(control_flow_ops.XLAControlFlowContext):
+
+  def AddOp(self, op):
+    if op.type in [
+        'AudioSummary', 'AudioSummaryV2', 'HistogramSummary', 'ImageSummary',
+        'MergeSummary', 'ScalarSummary', 'TensorSummary', 'TensorSummaryV2'
+    ]:
+      raise ValueError('Please use tf.contrib.summary instead of tf.summary '
+                       'inside of host_calls.')
+
+
+def _create_global_step(graph):
+  graph = graph or tf.compat.v1.get_default_graph()
+  if tf.compat.v1.train.get_global_step(graph) is not None:
+    raise ValueError('"global_step" already exists.')
+  # Create in proper graph and base name_scope.
+  with graph.as_default() as g, g.name_scope(None):
+    return tf.compat.v1.get_variable(
+        tf.compat.v1.GraphKeys.GLOBAL_STEP,
+        shape=[],
+        dtype=tf.dtypes.int64,
+        initializer=tf.compat.v1.initializers.zeros(),
+        trainable=False,
+        use_resource=True,
+        collections=[
+            tf.compat.v1.GraphKeys.GLOBAL_VARIABLES,
+            tf.compat.v1.GraphKeys.GLOBAL_STEP
+        ])
+
+
+def _create_or_get_iterations_per_loop():
+  """Creates or gets the iterations_per_loop variable.
+
+  In TPUEstimator, the user provided computation, the model_fn, is wrapped
+  inside a tf.while_loop for peak performance. The iterations of the loop are
+  specified by this variable, which adjusts its value on the CPU after each TPU
+  program execution and before the next TPU execution.
+
+  The purpose of using a variable, rather than a constant, is to allow
+  TPUEstimator to adapt the TPU training iterations to the final steps
+  specified by users. For example, if the user sets the iterations_per_loop as 4
+  in TPUConfig and steps as 10 in TPUEstimator.train(), the iterations_per_loop
+  variable will have the following value before each TPU training.
+
+      - 1st TPU execution: iterations_per_loop = 4
+      - 2nd TPU execution: iterations_per_loop = 4
+      - 3rd TPU execution: iterations_per_loop = 2
+
+  As model_fn increases the global step once per train_op invocation, the global
+  step is 10 after all TPU executions, matching the steps=10 inputs passed in by
+  users.
+
+  Returns:
+    A TF non-trainable resource variable.
+
+  Raises:
+    RuntimeError: If multiple iterations_per_loop variables are found.
+  """
+  graph = tf.compat.v1.get_default_graph()
+  collection_name = '{}_{}'.format(_TPU_ESTIMATOR, _ITERATIONS_PER_LOOP_VAR)
+  iter_vars = graph.get_collection(collection_name)
+  if len(iter_vars) == 1:
+    return iter_vars[0]
+  elif len(iter_vars) > 1:
+    raise RuntimeError('Multiple iterations_per_loop_var in collection.')
+
+  with ops.colocate_with(tf.compat.v1.train.get_global_step()):
+    with tf.compat.v1.variable_scope(
+        _TPU_ESTIMATOR, reuse=tf.compat.v1.AUTO_REUSE):
+      return tf.compat.v1.get_variable(
+          _ITERATIONS_PER_LOOP_VAR,
+          initializer=tf.compat.v1.initializers.zeros(),
+          shape=[],
+          dtype=tf.dtypes.int32,
+          trainable=False,
+          collections=[collection_name, tf.compat.v1.GraphKeys.LOCAL_VARIABLES],
+          use_resource=True)
+
+
+def _sync_variables_ops(ctx):
+  """Create varriables synchronization ops.
+
+  Gets the variables back from TPU nodes. This means the variables updated
+  by TPU will now be *synced* to host memory.
+  In BROADCAST mode, we skip this sync since the variables are usually too
+  big to transmit via RPC.
+
+  Args:
+    ctx: A `_InternalTPUContext` instance with mode.
+
+  Returns:
+    A list of sync ops.
+  """
+
+  if not ctx.is_input_broadcast_with_iterators():
+    return [
+        tf.debugging.check_numerics(v.read_value(),
+                                    'Gradient for %s is NaN' % v.name).op
+        for v in tf.compat.v1.trainable_variables()
+    ]
+  else:
+    return [tf.no_op()]
+
+
+def _increase_eval_step_op(iterations_per_loop):
+  """Returns an op to increase the eval step for TPU evaluation.
+
+  Args:
+    iterations_per_loop: Tensor. The number of eval steps running in the TPU
+      system before returning to the CPU host for each `Session.run`.
+
+  Returns:
+    An operation
+  """
+  eval_step = evaluation._get_or_create_eval_step()  # pylint: disable=protected-access
+  # Estimator.evaluate increments the eval step by 1 per Session.run by
+  # default, so here we add the remaining difference; e.g., with
+  # iterations_per_loop=4 this op adds 3.
+  return tf.compat.v1.assign_add(
+      eval_step,
+      tf.cast(iterations_per_loop - 1, dtype=eval_step.dtype),
+      use_locking=True)
+
+
+def _extract_key_names(tensor_or_dict):
+  if isinstance(tensor_or_dict, dict):
+    return sorted(tensor_or_dict.keys())
+  return []
+
+
+class PeriodicLogger(object):
+
+  def __init__(self, seconds):
+    self._log_every_n_seconds = seconds
+    self._last_log_time = 0
+
+  def log(self, msg, *args, **kw):
+    if time.time() - self._last_log_time > self._log_every_n_seconds:
+      self._last_log_time = time.time()
+      tf.compat.v1.logging.info(msg, *args, **kw)
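+
+  # Usage sketch (illustrative, not part of the original code):
+  #   logger = PeriodicLogger(seconds=60)
+  #   logger.log('step %d', step)  # emits at most one line per minute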
+
+
+class _SIGNAL(object):
+  """Signal used to control the thread of infeed/outfeed.
+
+  All preserved signals must be negative numbers. Positive numbers are used to
+  indicate the number of iterations for next training/evaluation loop.
+  """
+  NEXT_BATCH = -1
+  STOP = -2
+
+
+@estimator_export(v1=['estimator.tpu.TPUEstimatorSpec'])
+class TPUEstimatorSpec(model_fn_lib._TPUEstimatorSpec):  # pylint: disable=protected-access
+  """Ops and objects returned from a `model_fn` and passed to `TPUEstimator`.
+
+  See `EstimatorSpec` for `mode`, `predictions`, `loss`, `train_op`, and
+  `export_outputs`.
+
+  For evaluation, `eval_metrics` is a tuple of `metric_fn` and `tensors`, where
+  `metric_fn` runs on CPU to generate metrics and `tensors` represents the
+  `Tensor`s transferred from TPU system to CPU host and passed to `metric_fn`.
+  To be precise, TPU evaluation expects a slightly different signature from the
+  `tf.estimator.Estimator`. While `EstimatorSpec.eval_metric_ops` expects a
+  dict, `TPUEstimatorSpec.eval_metrics` is a tuple of `metric_fn` and `tensors`.
+  The `tensors` could be a list of `Tensor`s or dict of names to `Tensor`s. The
+  `tensors` usually specify the model logits, which are transferred back from
+  TPU system to CPU host. All tensors must be batch-major, i.e., the batch
+  size is the first dimension. Once all tensors are available at CPU host from
+  all shards, they are concatenated (on CPU) and passed as positional arguments
+  to the `metric_fn` if `tensors` is a list, or as keyword arguments if
+  `tensors` is a dict.
+  name to the result of calling a metric function, namely a `(metric_tensor,
+  update_op)` tuple. See `TPUEstimator` for MNIST example how to specify the
+  `eval_metrics`.
+
+  `scaffold_fn` is a function running on CPU to generate the `Scaffold`. This
+  function should not capture any Tensors in `model_fn`.
+
+  `host_call` is a tuple of a `function` and a list or dictionary of `tensors`
+  to pass to that function; the function returns a list of Tensors.
+  `host_call` currently works for train() and evaluate(). The function is
+  executed on the CPU on every step, so there is communication overhead when
+  sending tensors from TPU to CPU. To reduce the overhead, try reducing the
+  size of the tensors. The `tensors` are concatenated along their major (batch)
+  dimension, and so must be >= rank 1. The `host_call` is useful for writing
+  summaries with `tf.contrib.summary.create_file_writer`.
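+
+  A minimal sketch of `eval_metrics` (the names `labels`, `logits`, `mode` and
+  `loss` are illustrative placeholders, not part of this API):
+
+    def metric_fn(labels, logits):
+      accuracy = tf.compat.v1.metrics.accuracy(
+          labels=labels, predictions=tf.math.argmax(logits, axis=-1))
+      return {'accuracy': accuracy}
+
+    spec = TPUEstimatorSpec(
+        mode=mode, loss=loss,
+        eval_metrics=(metric_fn, [labels, logits]))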
+  """
+
+  def __new__(cls,
+              mode,
+              predictions=None,
+              loss=None,
+              train_op=None,
+              eval_metrics=None,
+              export_outputs=None,
+              scaffold_fn=None,
+              host_call=None,
+              training_hooks=None,
+              evaluation_hooks=None,
+              prediction_hooks=None):
+    """Creates a validated `TPUEstimatorSpec` instance."""
+    cls._host_calls = {}
+    if eval_metrics is not None:
+      cls._host_calls['eval_metrics'] = eval_metrics
+    if host_call is not None:
+      cls._host_calls['host_call'] = host_call
+    _OutfeedHostCall.validate(cls._host_calls)
+
+    training_hooks = tuple(training_hooks or [])
+    evaluation_hooks = tuple(evaluation_hooks or [])
+    prediction_hooks = tuple(prediction_hooks or [])
+
+    for hook in training_hooks + evaluation_hooks + prediction_hooks:
+      if not isinstance(hook, tf.compat.v1.train.SessionRunHook):
+        raise TypeError(
+            'All hooks must be SessionRunHook instances, given: {}'.format(
+                hook))
+
+    return super(TPUEstimatorSpec, cls).__new__(
+        cls,
+        mode=mode,
+        predictions=predictions,
+        loss=loss,
+        train_op=train_op,
+        eval_metrics=eval_metrics,
+        export_outputs=export_outputs,
+        scaffold_fn=scaffold_fn,
+        host_call=host_call,
+        training_hooks=training_hooks,
+        evaluation_hooks=evaluation_hooks,
+        prediction_hooks=prediction_hooks)
+
+  def as_estimator_spec(self):
+    """Creates an equivalent `EstimatorSpec` used by CPU train/eval."""
+    host_call_ret = _OutfeedHostCall.create_cpu_hostcall(self._host_calls)
+    eval_metric_ops = None
+    if self.eval_metrics is not None:
+      eval_metric_ops = host_call_ret['eval_metrics']
+    hooks = None
+    if self.host_call is not None:
+      hooks = [_OutfeedHostCallHook(host_call_ret['host_call'])]
+    loss = self.loss
+    if (tensor_tracer.TensorTracer.is_enabled() and
+        self.train_op is not None):
+      tt = tensor_tracer.TensorTracer()
+      loss = tt.trace_cpu(tf.compat.v1.get_default_graph(), loss, self.train_op)
+
+    hooks = tuple(hooks or [])
+    scaffold = self.scaffold_fn() if self.scaffold_fn else None
+    return model_fn_lib.EstimatorSpec(
+        mode=self.mode,
+        predictions=self.predictions,
+        loss=loss,
+        train_op=self.train_op,
+        eval_metric_ops=eval_metric_ops,
+        export_outputs=self.export_outputs,
+        scaffold=scaffold,
+        training_hooks=self.training_hooks + hooks,
+        evaluation_hooks=self.evaluation_hooks + hooks,
+        prediction_hooks=self.prediction_hooks + hooks)
+
+
+class _OpQueueContext(object):
+  """Manages work queue and thread for a infeed/outfeed thread."""
+
+  def __init__(self, name, target, args):
+    self._name = name
+    self._queue = Queue.Queue()
+    args = (self,) + args
+    self._thread = threading.Thread(name=name, target=target, args=args)
+    self._thread.daemon = True
+    self._thread.start()
+
+  def stop(self):
+    self._queue.put(_SIGNAL.STOP)
+
+  def send_next_batch_signal(self, iterations):
+    self._queue.put(iterations)
+
+  def read_iteration_counts(self):
+    while True:
+      iterations = self._queue.get(block=True)
+      tf.compat.v1.logging.debug('%s read iterations %s', self._name,
+                                 iterations)
+      if iterations == _SIGNAL.STOP:
+        tf.compat.v1.logging.info('%s received shutdown signal, stopping.',
+                                  self._name)
+        return
+      yield iterations
+
+  def join(self):
+    tf.compat.v1.logging.info('Shutting down %s thread.', self._name)
+    self.stop()
+    self._thread.join()
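+
+  # Protocol sketch (illustrative): the coordinator calls
+  # send_next_batch_signal(n) before each loop, the worker thread consumes
+  # counts via read_iteration_counts(), and stop()/join() shut it down via
+  # the _SIGNAL.STOP sentinel.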
+
+
+class _OpSignalOnceQueueContext(_OpQueueContext):
+  """Manages work queue and thread for a infeed/outfeed thread.
+
+  This subclass only signals once.
+  """
+
+  def __init__(self, name, target, args):
+    super(_OpSignalOnceQueueContext, self).__init__(name, target, args)
+    self._has_signaled = False
+
+  def send_next_batch_signal(self, iterations):
+    if not self._has_signaled:
+      self._queue.put(iterations)
+      self._has_signaled = True
+
+
+class TPUInfeedOutfeedSessionHook(tf.compat.v1.train.SessionRunHook):
+  """A Session hook setting up the TPU initialization, infeed, and outfeed.
+
+  This hook does two major things:
+  1. initializes and shuts down the TPU system.
+  2. launches and joins the threads for infeed enqueue and (optional) outfeed
+     dequeue.
+  """
+
+  def __init__(self,
+               ctx,
+               enqueue_ops,
+               dequeue_ops,
+               tpu_compile_op,
+               run_infeed_loop_on_coordinator=True,
+               rendezvous=None,
+               master=None,
+               session_config=None,
+               tpu_init_ops=None,
+               outfeed_every_n_steps=1):
+    self._master_job = ctx.master_job
+    self._enqueue_ops = enqueue_ops
+    self._dequeue_ops = dequeue_ops
+    self._rendezvous = rendezvous
+    self._master = master
+    self._session_config = session_config
+    self._init_ops = list(tpu_init_ops or [])
+    if ctx.embedding_config is None:
+      self._embedding_layer_config = None
+    else:
+      self._embedding_layer_config = (
+          ctx.embedding_config.tpu_embedding.config_proto)
+    self._run_infeed_loop_on_coordinator = run_infeed_loop_on_coordinator
+    self._initial_infeed_sleep_secs = (
+        ctx.config.tpu_config.initial_infeed_sleep_secs)
+    self._tpu_compile_op = tpu_compile_op
+
+    # When using model parallelism, the TPU is pre-initialized at startup to
+    # fetch mesh information. We skip re-initializing it here for
+    # MeshTensorFlow since it places variables on the TPU directly.
+    # Re-initializing the TPU would corrupt the variables, as previously
+    # allocated memory might be overwritten for other purposes.
+    if (ctx.model_parallelism_enabled and
+        (ctx.config.tpu_config.per_host_input_for_training is
+         tpu_config.InputPipelineConfig.BROADCAST)):
+      self._should_initialize_tpu = False
+    else:
+      self._should_initialize_tpu = True
+    self._outfeed_every_n_steps = outfeed_every_n_steps
+
+  def begin(self):
+    tf.compat.v1.logging.info('TPU job name %s', self._master_job)
+    self._iterations_per_loop_var = _create_or_get_iterations_per_loop()
+    if self._should_initialize_tpu:
+      self._finalize_ops = [
+          tf.compat.v1.tpu.shutdown_system(job=self._master_job)
+      ]
+    else:
+      self._finalize_ops = []
+
+    summary_writer_init_ops = contrib_summary.summary_writer_initializer_op()
+    self._init_ops.extend(summary_writer_init_ops)
+    # Get all the writer resources from the initializer, so we know what to
+    # flush.
+    for op in summary_writer_init_ops:
+      self._finalize_ops.append(contrib_summary.flush(writer=op.inputs[0]))
+
+  def _run_infeed(self, queue_ctx, session):
+    tf.compat.v1.logging.info('Starting infeed thread controller.')
+    if self._initial_infeed_sleep_secs:
+      tf.compat.v1.logging.info('Infeed thread sleeping for %d seconds.',
+                                self._initial_infeed_sleep_secs)
+      time.sleep(self._initial_infeed_sleep_secs)
+      tf.compat.v1.logging.info('Infeed thread starting after sleep')
+
+    with self._rendezvous.catch_errors(source='infeed', session=session):
+      if self._run_infeed_loop_on_coordinator:
+        for count, steps in enumerate(queue_ctx.read_iteration_counts()):
+          for i in xrange(steps):
+            tf.compat.v1.logging.debug('Infeed enqueue for iteration (%d, %d)',
+                                       count, i)
+            session.run(self._enqueue_ops)
+      else:
+        for _ in queue_ctx.read_iteration_counts():
+          session.run(self._enqueue_ops)
+      tf.compat.v1.logging.info('Infeed thread finished, shutting down.')
+
+  def _run_outfeed(self, queue_ctx, session):
+    tf.compat.v1.logging.info('Starting outfeed thread controller.')
+    status_logger = PeriodicLogger(seconds=60)
+    with self._rendezvous.catch_errors(source='outfeed', session=session):
+      for count, steps in enumerate(queue_ctx.read_iteration_counts()):
+        step_counter = 0
+        for i in xrange(steps):
+          tf.compat.v1.logging.debug('Outfeed dequeue for iteration (%d, %d)',
+                                     count, i)
+          if step_counter % self._outfeed_every_n_steps == 0:
+            session.run(self._dequeue_ops)
+          step_counter += 1
+          status_logger.log('Outfeed finished for iteration (%d, %d)', count, i)
+      tf.compat.v1.logging.info('Outfeed thread finished, shutting down.')
+
+  def _create_infeed_controller(self, name, target, args):
+    return _OpQueueContext(name=name, target=target, args=args)
+
+  def _assertCompilationSucceeded(self, result, coord):
+    proto = tpu_compilation_result.CompilationResultProto()
+    proto.ParseFromString(result)
+    if proto.status_error_message:
+      tf.compat.v1.logging.error('Compilation failed: {}'.format(
+          proto.status_error_message))
+      coord.request_stop()
+    else:
+      tf.compat.v1.logging.info('Compilation succeeded')
+
+  def after_create_session(self, session, coord):
+    if self._should_initialize_tpu:
+      tf.compat.v1.logging.info('Init TPU system')
+      start = time.time()
+      with tf.Graph().as_default():
+        with tf.compat.v1.Session(
+            self._master, config=self._session_config) as sess:
+          sess.run(
+              tf.compat.v1.tpu.initialize_system(
+                  job=self._master_job,
+                  embedding_config=self._embedding_layer_config))
+      tf.compat.v1.logging.info('Initialized TPU in %d seconds',
+                                time.time() - start)
+
+    session.run(
+        self._init_ops,
+        options=config_pb2.RunOptions(timeout_in_ms=30 * 60 * 1000))
+
+    if os.environ.get('TPU_SPLIT_COMPILE_AND_EXECUTE', '') == '1':
+      tf.compat.v1.logging.info(
+          'Compiling user program: this may take a while...')
+      self._assertCompilationSucceeded(session.run(self._tpu_compile_op), coord)
+
+    self._infeed_controller = self._create_infeed_controller(
+        name='InfeedController', target=self._run_infeed, args=(session,))
+
+    self._outfeed_controller = _OpQueueContext(
+        name='OutfeedController', target=self._run_outfeed, args=(session,))
+
+    # Enable the worker watchdog to terminate workers on coordinator exit.
+    watchdog_timeout = int(os.environ.get('TF_TPU_WATCHDOG_TIMEOUT', '0'))
+    if watchdog_timeout > 0:
+      session_support.start_worker_watchdog(
+          session, shutdown_timeout=watchdog_timeout)
+
+  def before_run(self, run_context):
+    iterations = run_context.session.run(self._iterations_per_loop_var)
+
+    tf.compat.v1.logging.info('Enqueue next (%d) batch(es) of data to infeed.',
+                              iterations)
+    self._infeed_controller.send_next_batch_signal(iterations)
+
+    tf.compat.v1.logging.info(
+        'Dequeue next (%d) batch(es) of data from outfeed.', iterations)
+    self._outfeed_controller.send_next_batch_signal(iterations)
+
+  def end(self, session):
+    tf.compat.v1.logging.info('Stop infeed thread controller')
+    self._infeed_controller.join()
+    self._rendezvous.record_done('infeed')
+
+    tf.compat.v1.logging.info('Stop output thread controller')
+    self._outfeed_controller.join()
+    self._rendezvous.record_done('outfeed')
+
+    tf.compat.v1.logging.info('Shutdown TPU system.')
+    session.run(self._finalize_ops)
+
+
+class TPUInfeedOutfeedSessionHookForPrediction(TPUInfeedOutfeedSessionHook):
+
+  def __init__(self,
+               ctx,
+               enqueue_ops,
+               dequeue_ops,
+               tpu_compile_op,
+               rendezvous=None,
+               master=None,
+               session_config=None):
+    super(TPUInfeedOutfeedSessionHookForPrediction, self).__init__(
+        ctx,
+        enqueue_ops,
+        dequeue_ops,
+        tpu_compile_op=tpu_compile_op,
+        run_infeed_loop_on_coordinator=False,
+        rendezvous=rendezvous,
+        master=master,
+        session_config=session_config)
+
+  def _create_infeed_controller(self, name, target, args):
+    return _OpSignalOnceQueueContext(name=name, target=target, args=args)
+
+
+class _TPUStopAtStepHook(tf.compat.v1.train.SessionRunHook):
+  """Hook that requests stop at a specified step.
+
+  This hook is similar to the `session_run_hook._StopAfterNEvalsHook` with
+  the following differences for TPU training:
+
+  1. This hook sets the variable for `iterations_per_loop`, which is used by
+     `TPUInfeedOutfeedSessionHook` to control the iterations for infeed/outfeed.
+     If the `iterations_per_loop` value is specified as time in seconds, the
+     number of iterations per `Session.run` will be estimated automatically
+     based on per iteration runtime.
+
+     As the hook execution order is not guaranteed, the variable update is
+     handled in `after_create_session` and `after_run` as
+     `TPUInfeedOutfeedSessionHook` reads the variable value in `before_run`.
+
+  2. For each training loop (session.run), the global step could be increased
+     multiple times on TPU. The global step tensor value will be explicitly read
+     again in `after_run` to ensure the latest value is retrieved, avoiding a
+     race condition.
+  """
+
+  def __init__(self,
+               iterations_per_loop_counter,
+               num_steps=None,
+               final_step=None):
+    """Initializes a `TPUStopAtStepHook`.
+
+    Args:
+      iterations_per_loop_counter: A namedtuple of (`value`, `unit`) that
+        represents either the number of iterations ('count') or the time in
+        seconds ('seconds') to run the optimizer per loop, depending on the
+        `unit` specified.
+      num_steps: Number of steps to execute.
+      final_step: Step after which to stop.
+
+    Raises:
+      ValueError: If one of the arguments is invalid.
+    """
+    if num_steps is None and final_step is None:
+      raise ValueError('One of `num_steps` or `final_step` must be specified.')
+    if num_steps is not None and final_step is not None:
+      raise ValueError(
+          'Only one of `num_steps` or `final_step` can be specified.')
+    self._iterations_per_loop_counter = iterations_per_loop_counter
+    if self._iterations_per_loop_counter.unit not in ['seconds', 'count']:
+      raise ValueError('Only `count` or `seconds` are accepted as the '
+                       '`iterations_per_loop_counter.unit`.')
+    self._num_steps = num_steps
+    self._final_step = final_step
+    self._next_iteration_count = 1
+    self._iteration_count_estimator = None
+    if self._iterations_per_loop_counter.unit == 'seconds':
+      self._iteration_count_estimator = (
+          iteration_count_estimator.IterationCountEstimator())
+    self._start_time = time.time()
+
+  def _next_iterations(self, global_step, final_step):
+    """Computes the next iterations count.
+
+    The next iterations count is computed by choosing the smaller of the
+    remaining step count (`final_step` - `global_step`) and the estimated
+    iterations count returned by the estimator.
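+
+    For example (illustrative numbers): with `final_step=10`, `global_step=8`
+    and an estimated iterations count of 4, this returns min(10 - 8, 4) = 2.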
+
+    Args:
+      global_step: The current step.
+      final_step: Step after which to stop.
+
+    Returns:
+      The number of iterations count to run per loop.
+    """
+    remaining_steps = final_step - global_step
+
+    if self._iteration_count_estimator is not None:
+      estimated_iterations = self._iteration_count_estimator.get(
+          self._iterations_per_loop_counter.value)
+    else:
+      estimated_iterations = self._iterations_per_loop_counter.value
+
+    self._next_iteration_count = min(remaining_steps, estimated_iterations)
+    return self._next_iteration_count
+
+  def begin(self):
+    """Initializes variables.
+
+    Initializes the global step and iterations per loop variables.
+
+    Raises:
+      RuntimeError: An error occurred if global step variable does not exist.
+    """
+    self._global_step_tensor = tf.compat.v1.train.get_global_step()
+    if self._global_step_tensor is None:
+      raise RuntimeError('Global step should be created.')
+
+    self._iterations_per_loop_var = _create_or_get_iterations_per_loop()
+
+  def after_create_session(self, session, coord):
+    """Computes and updates the first time iterations count.
+
+    The iterations are computed by choosing the smaller of (`final_step` -
+    `global_step`) and the initial estimated iterations returned by the
+    estimator (1 by default).
+
+    Args:
+      session: A TensorFlow Session that has been created.
+      coord: A Coordinator object which keeps track of all threads.
+    """
+    global_step = session.run(self._global_step_tensor)
+    if self._final_step is None:
+      self._final_step = global_step + self._num_steps
+
+    iterations = self._next_iterations(global_step, self._final_step)
+    self._iterations_per_loop_var.load(iterations, session=session)
+
+  def before_run(self, run_context):
+    """Reset the timer."""
+    if self._iteration_count_estimator is not None:
+      self._start_time = time.time()
+
+  def after_run(self, run_context, run_values):
+    """Computes the next iterations per loop value or terminates.
+
+    Computes the elapsed time to run the last optimizer loop and if the
+    `IterationCountEstimator` is used, records the elapsed time and iterations
+    count. If the final step count has been reached, terminates. Otherwise,
+    computes and updates the number of iterations to run the optimizer per loop.
+
+    Args:
+      run_context: A `SessionRunContext` object.
+      run_values: A SessionRunValues object.
+    """
+    if self._iteration_count_estimator is not None:
+      elapsed_time = time.time() - self._start_time
+      tf.compat.v1.logging.info('ElapsedTime: %.3f', elapsed_time)
+      self._iteration_count_estimator.update(elapsed_time,
+                                             self._next_iteration_count)
+
+    # Global step cannot be retrieved via SessionRunArgs and before_run due to
+    # a race condition.
+    global_step = run_context.session.run(self._global_step_tensor)
+    if global_step >= self._final_step:
+      run_context.request_stop()
+    else:
+      iterations = self._next_iterations(global_step, self._final_step)
+      self._iterations_per_loop_var.load(
+          iterations, session=run_context.session)
+
+
+class _SetEvalIterationsHook(tf.compat.v1.train.SessionRunHook):
+  """Hook that requests stop at a specified step."""
+
+  def __init__(self, num_steps):
+    """Initializes a `_SetEvalIterationsHook`.
+
+    Args:
+      num_steps: Number of steps to execute.
+    """
+    self._num_steps = num_steps
+
+  def begin(self):
+    self._iterations_per_loop_var = _create_or_get_iterations_per_loop()
+
+  def after_create_session(self, session, coord):
+    self._iterations_per_loop_var.load(self._num_steps, session=session)
+
+
+class _StoppingPredictHook(tf.compat.v1.train.SessionRunHook):
+  """Hook that requests stop according to the stopping signal in prediction."""
+
+  def __init__(self, scalar_stopping_signal):
+    self._scalar_stopping_signal = scalar_stopping_signal
+
+  def begin(self):
+    self._iterations_per_loop_var = _create_or_get_iterations_per_loop()
+
+  def after_create_session(self, session, coord):
+    # This is not strictly necessary, as we do not run infeed enqueue and
+    # outfeed dequeue in side threads for the prediction model. But it makes
+    # TPUInfeedOutfeedSessionHook print a nicer message.
+    self._iterations_per_loop_var.load(1, session=session)
+
+  def before_run(self, run_context):
+    return tf.compat.v1.train.SessionRunArgs(self._scalar_stopping_signal)
+
+  def after_run(self, run_context, run_values):
+    _ = run_context
+    scalar_stopping_signal = run_values.results
+    if _StopSignals.should_stop(scalar_stopping_signal):
+      # NOTE(xiejw): In prediction, stopping signals are inserted for each
+      # batch. And we append one more batch to signal the system it should stop.
+      # The data flow might look like
+      #
+      #  batch   0: images, labels, stop = 0  (user provided)
+      #  batch   1: images, labels, stop = 0  (user provided)
+      #  ...
+      #  batch  99: images, labels, stop = 0  (user provided)
+      #  batch 100: images, labels, stop = 1  (TPUEstimator appended)
+      #
+      # where the final batch (id = 100) is appended by TPUEstimator, so we
+      # should drop it before returning the predictions to the user.
+      # To achieve that, we throw an OutOfRangeError in after_run. Once
+      # MonitoredSession sees this error in SessionRunHook.after_run, the
+      # "current" prediction, i.e., the batch with id=100, will be discarded
+      # immediately.
+      raise tf.errors.OutOfRangeError(None, None, 'Stopped by stopping signal.')
+
+
+def generate_per_core_enqueue_ops_fn_for_host(ctx, input_fn,
+                                              inputs_structure_recorder,
+                                              host_device, host_id):
+  """Generates infeed enqueue ops for per-core input_fn on a single host."""
+  captured_infeed_queue = _CapturedObject()
+  tpu_ordinal_function_impl = ctx.tpu_ordinal_function(host_id)
+
+  def enqueue_ops_fn():
+    """A fn returns enqueue_ops."""
+    num_cores_per_host = ctx.num_of_cores_per_host
+    per_host_sharded_inputs = []
+    for core_ordinal in range(num_cores_per_host):
+      with ops.name_scope('ordinal_%d' % (core_ordinal)):
+        user_context = tpu_context.TPUContext(
+            internal_ctx=ctx,
+            input_device=host_device,
+            invocation_index=host_id * ctx.num_of_cores_per_host + core_ordinal,
+            host_id=host_id)
+        inputs = _Inputs.from_input_fn(input_fn(user_context))
+        if inputs.is_dataset:
+          raise TypeError(
+              '`input_fn` returning `Dataset` is not yet supported in '
+              'per-core input pipeline deployment. Please set '
+              'TPUConfig.per_host_input_for_training to True or return '
+              '`features` and `labels` from `input_fn`.')
+        features, labels = inputs.features_and_labels()
+
+        inputs_structure_recorder.validate_and_record_structure(
+            features, labels)
+        flattened_inputs = (
+            inputs_structure_recorder.flatten_features_and_labels(
+                features, labels))
+        per_host_sharded_inputs.append(flattened_inputs)
+
+    infeed_queue = tpu_feed.InfeedQueue(
+        number_of_tuple_elements=len(per_host_sharded_inputs[0]))
+    captured_infeed_queue.capture(infeed_queue)
+
+    per_host_enqueue_ops = infeed_queue.generate_enqueue_ops(
+        per_host_sharded_inputs, tpu_ordinal_function=tpu_ordinal_function_impl)
+    return per_host_enqueue_ops
+
+  return enqueue_ops_fn, captured_infeed_queue
+
+
+def generate_per_host_enqueue_ops_fn_for_host(ctx, input_fn,
+                                              inputs_structure_recorder,
+                                              batch_axis, device, host_id):
+  """Generates infeed enqueue ops for per-host input_fn on a single host."""
+  captured_infeed_queue = _CapturedObject()
+
+  dataset_initializer = None
+
+  with tf.compat.v1.device(device):
+    user_context = tpu_context.TPUContext(
+        internal_ctx=ctx,
+        input_device=device,
+        invocation_index=host_id,
+        host_id=host_id)
+    inputs = _Inputs.from_input_fn(input_fn(user_context))
+
+    is_dataset = inputs.is_dataset
+    if ctx.mode == model_fn_lib.ModeKeys.PREDICT:
+      if not is_dataset:
+        raise TypeError(
+            'For mode PREDICT, `input_fn` must return `Dataset` instead of '
+            '`features` and `labels`.')
+      if batch_axis is not None:
+        raise TypeError('For mode PREDICT, batch_axis is not supported yet.')
+      inputs = _InputsWithStoppingSignals(
+          dataset=inputs.dataset,
+          batch_size=ctx.batch_size_for_input_fn,
+          add_padding=True)
+
+    if is_dataset:
+      dataset_initializer = inputs.dataset_initializer()
+
+    tpu_ordinal_function_impl = ctx.tpu_ordinal_function(host_id)
+
+  def enqueue_ops_fn():
+    """A Fn returning the TPU infeed enqueue ops.
+
+    By providing it as a Fn, it can be invoked inside the tf.while_loop so that
+    the input pipeline for multiple iterations can be executed by one
+    Session.run call.
+
+    Returns:
+      A list of enqueue ops, or a dict of enqueue ops and signals.
+    """
+    with tf.compat.v1.device(device):
+      num_of_replicas_per_host = ctx.num_of_replicas_per_host
+      # Convert user input to features and labels. If the user returns a
+      # dataset, it is initialized and the features and labels are extracted
+      # via `dataset.iterator.get_next()`.
+      features, labels = inputs.features_and_labels()
+      signals = inputs.signals()
+
+      features, labels, enqueue_datas_list = (
+          _tpu_estimator_embedding.split_inputs(
+              ctx,
+              features,
+              labels,
+              num_cores_per_batch=num_of_replicas_per_host))
+
+      inputs_structure_recorder.validate_and_record_structure(features, labels)
+      unsharded_tensor_list = (
+          inputs_structure_recorder.flatten_features_and_labels(
+              features, labels, signals))
+
+      infeed_queue = tpu_feed.InfeedQueue(
+          tuple_types=[t.dtype for t in unsharded_tensor_list],
+          tuple_shapes=[t.shape for t in unsharded_tensor_list],
+          shard_dimensions=batch_axis)
+      captured_infeed_queue.capture(infeed_queue)
+      infeed_queue.set_number_of_shards(num_of_replicas_per_host)
+      per_host_enqueue_ops = (
+          infeed_queue.split_inputs_and_generate_enqueue_ops(
+              unsharded_tensor_list,
+              placement_function=lambda x: device,
+              tpu_ordinal_function=tpu_ordinal_function_impl))
+
+      if ctx.embedding_config:
+        per_host_enqueue_ops.extend(
+            ctx.embedding_config.tpu_embedding.generate_enqueue_ops(
+                enqueue_datas_list))
+
+      if signals is None:
+        return per_host_enqueue_ops
+      else:
+        return {
+            'ops': per_host_enqueue_ops,
+            'signals': signals,
+        }
+
+  return enqueue_ops_fn, captured_infeed_queue, dataset_initializer
+
+
+def generate_per_host_v2_enqueue_ops_fn_for_host(ctx, input_fn,
+                                                 inputs_structure_recorder,
+                                                 device, host_id,
+                                                 invocation_index):
+  """Generates infeed enqueue ops for per-host input_fn on a single host."""
+  captured_infeed_queue = _CapturedObject()
+  dataset_initializer = None
+
+  with tf.compat.v1.device(device):
+    user_context = tpu_context.TPUContext(
+        internal_ctx=ctx,
+        input_device=device,
+        invocation_index=invocation_index,
+        host_id=host_id)
+    inputs = _Inputs.from_input_fn(input_fn(user_context))
+
+    is_dataset = inputs.is_dataset
+    if not is_dataset:
+      raise TypeError('`input_fn` must return a `Dataset` for the PER_HOST_V2 '
+                      'input pipeline configuration.')
+
+    # Be aware that when num_cores_per_replica > num_cores_per_host,
+    # ctx.num_of_replicas_per_host is 0.
+    if ctx.mode == model_fn_lib.ModeKeys.PREDICT:
+      inputs = _InputsWithStoppingSignals(
+          dataset=inputs.dataset,
+          batch_size=ctx.batch_size_for_input_fn,
+          add_padding=True,
+          num_invocations_per_step=max(1, ctx.num_of_replicas_per_host))
+
+    dataset_initializer = inputs.dataset_initializer()
+
+    tpu_ordinal_function_impl = ctx.tpu_ordinal_function(host_id)
+
+    def device_function_impl(shard_id):
+      if ctx.device_assignment is not None:
+        # Find the replica_id of the host's logical core 0.
+        # The current host_id is guaranteed to contain the logical core 0,
+        # even when num_cores_per_replica > num_cores_per_host -- the caller
+        # makes sure that this host_id is one that receives data (i.e. calls
+        # input_fn).
+        replica_id = ctx.device_assignment.lookup_replicas(
+            task_id=host_id, logical_core=0)[shard_id]
+        return ctx.tpu_host_placement_function(replica_id=replica_id)
+      else:
+        return None
+
+  def enqueue_ops_fn():
+    """Generates the per_host enqueue ops."""
+    control_deps = []
+    per_host_sharded_inputs = []
+    enqueue_datas_list = []
+    # Be aware that when num_cores_per_replica > num_cores_per_host,
+    # ctx.num_of_replicas_per_host is 0.
+    num_replicas_per_host = max(1, ctx.num_of_replicas_per_host)
+    cached_signals = None
+    with tf.compat.v1.device(device):
+      if not inputs.is_dataset:
+        raise TypeError('`input_fn` must return a `Dataset` for this mode.')
+      for host in range(num_replicas_per_host):
+        # Use control dependencies to ensure a deterministic ordering.
+        if ctx.allow_per_host_v2_parallel_get_next:
+          features, labels = inputs.features_and_labels()  # Calls get_next()
+        with tf.control_dependencies(control_deps):
+          if not ctx.allow_per_host_v2_parallel_get_next:
+            features, labels = inputs.features_and_labels()  # Calls get_next()
+          signals = inputs.signals()
+
+          # All the replicas share replica 0's stopping signal.
+          # This avoids inconsistent state among different model replicas.
+          if cached_signals:
+            signals['stopping'] = cached_signals['stopping']
+          else:
+            cached_signals = signals
+
+        features, labels, enqueue_data = (
+            _tpu_estimator_embedding.split_inputs(ctx, features, labels))
+        if len(enqueue_data) != 1:
+          raise RuntimeError(('Missing or extra enqueue_data for host {}. '
+                              'len(enqueue_data) = {}.').format(
+                                 host, len(enqueue_data)))
+        enqueue_datas_list.append(enqueue_data[0])
+
+        inputs_structure_recorder.validate_and_record_structure(
+            features, labels)
+        flattened_inputs = (
+            inputs_structure_recorder.flatten_features_and_labels(
+                features, labels, signals))
+        control_deps.extend(flattened_inputs)
+        per_host_sharded_inputs.append(flattened_inputs)
+
+      if inputs_structure_recorder.flattened_input_dims:
+        input_partition_dims = inputs_structure_recorder.flattened_input_dims
+        if signals:
+          input_partition_dims += [None] * len(signals)
+        # pylint: disable=protected-access
+        infeed_queue = tpu_feed._PartitionedInfeedQueue(
+            number_of_tuple_elements=len(per_host_sharded_inputs[0]),
+            host_id=host_id,
+            input_partition_dims=input_partition_dims,
+            device_assignment=ctx.device_assignment)
+        per_host_enqueue_ops = infeed_queue.generate_enqueue_ops(
+            per_host_sharded_inputs)
+      else:
+        infeed_queue = tpu_feed.InfeedQueue(
+            number_of_tuple_elements=len(per_host_sharded_inputs[0]))
+        per_host_enqueue_ops = infeed_queue.generate_enqueue_ops(
+            per_host_sharded_inputs,
+            tpu_ordinal_function=tpu_ordinal_function_impl,
+            placement_function=device_function_impl)
+
+      captured_infeed_queue.capture(infeed_queue)
+
+    if ctx.embedding_config:
+      per_host_enqueue_ops.extend(
+          ctx.embedding_config.tpu_embedding.generate_enqueue_ops(
+              enqueue_datas_list))
+
+    if signals is None:
+      return per_host_enqueue_ops
+    else:
+      return {
+          'ops': per_host_enqueue_ops,
+          'signals': signals,
+      }
+
+  return enqueue_ops_fn, captured_infeed_queue, dataset_initializer
+
+
+def generate_broadcast_enqueue_ops_fn(ctx, input_fn, inputs_structure_recorder,
+                                      num_hosts):
+  """Generates infeed enqueue ops for one input_fn on all the hosts."""
+  captured_infeed_queue = _CapturedObject()
+  dataset_initializer = None
+  device_0 = ctx.tpu_host_placement_function(host_id=0)
+  with tf.compat.v1.device(device_0):
+    user_context = tpu_context.TPUContext(
+        internal_ctx=ctx, input_device=device_0, invocation_index=0, host_id=0)
+    inputs = _Inputs.from_input_fn(input_fn(user_context))
+
+    is_dataset = inputs.is_dataset
+    if ctx.mode == model_fn_lib.ModeKeys.PREDICT:
+      if not is_dataset:
+        raise TypeError(
+            'For mode PREDICT, `input_fn` must return `Dataset` instead of '
+            '`features` and `labels`.')
+
+      inputs = _InputsWithStoppingSignals(
+          dataset=inputs.dataset,
+          batch_size=ctx.batch_size_for_input_fn,
+          add_padding=True)
+
+    if is_dataset:
+      dataset_initializer = inputs.dataset_initializer()
+    num_replicas_per_host = ctx.num_of_replicas_per_host
+
+  def tpu_ordinal_function_impl(shard_id):
+    if ctx.device_assignment:
+      return ctx.device_assignment.tpu_ordinal(replica=shard_id)
+    else:
+      return shard_id % num_replicas_per_host
+
+  def device_function_impl(shard_id):
+    # shard_id ranges from 0 to num_of_replicas_per_host - 1.
+    # A shard is a replica inside a host.
+    # In broadcast mode (generate_broadcast_enqueue_ops_fn), the enqueue ops
+    # are always executed on the first host. Thus shard_id equals to replica_id.
+    return ctx.tpu_host_placement_function(replica_id=shard_id)
+
+  def enqueue_ops_fn():
+    """Generates enqueue ops for all the hosts."""
+    broadcasted_inputs = []
+    flattened_inputs = None  # Cache result from input_fn.
+    signals = None
+    num_replicas = ctx.num_replicas
+    core_id = 0
+    for host_id in xrange(num_hosts):
+      with tf.compat.v1.device(
+          ctx.tpu_host_placement_function(host_id=host_id)):
+        for _ in xrange(ctx.num_of_replicas_per_host):
+          # Note: input_fn is only called once at host 0 for the first replica.
+          # The features and labels returned from that invocation are
+          # broadcast to the other replicas (including the replicas on other
+          # hosts).
+          if flattened_inputs is None:
+            features, labels = inputs.features_and_labels()  # Calls get_next()
+            signals = inputs.signals()
+
+            inputs_structure_recorder.validate_and_record_structure(
+                features, labels)
+            flattened_inputs = (
+                inputs_structure_recorder.flatten_features_and_labels(
+                    features, labels, signals))
+            if (ctx.config.tpu_config.eval_training_input_configuration is
+                tpu_config.InputPipelineConfig.SLICED):
+              input_slices = [
+                  tf.split(x, num_replicas) for x in flattened_inputs
+              ]
+          if (ctx.config.tpu_config.eval_training_input_configuration is
+              tpu_config.InputPipelineConfig.SLICED):
+            # For each core, slice out its piece of the flattened inputs.
+            broadcasted_inputs.append([x[core_id] for x in input_slices])
+            core_id += 1
+          else:
+            broadcasted_inputs.append(flattened_inputs)
+
+    infeed_queue = tpu_feed.InfeedQueue(
+        number_of_tuple_elements=len(broadcasted_inputs[0]))
+    captured_infeed_queue.capture(infeed_queue)
+    enqueue_ops = infeed_queue.generate_enqueue_ops(
+        broadcasted_inputs,
+        tpu_ordinal_function=tpu_ordinal_function_impl,
+        placement_function=device_function_impl)
+
+    if signals is None:
+      return enqueue_ops
+    else:
+      return {
+          'ops': enqueue_ops,
+          'signals': signals,
+      }
+
+  return enqueue_ops_fn, captured_infeed_queue, dataset_initializer
+
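+# Example (sketch, not part of the original module): the wiring shared by the
+# generate_*_enqueue_ops_fn helpers above, as consumed by _InputPipeline
+# below. Variable names are illustrative.
+#
+#   enqueue_ops_fn, captured_queue, dataset_init = (
+#       generate_broadcast_enqueue_ops_fn(ctx, input_fn, recorder, num_hosts))
+#   enqueue_ops = enqueue_ops_fn()       # builds the infeed enqueue graph
+#   infeed_queue = captured_queue.get()  # valid only after enqueue_ops_fn()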
+
+class TensorPacker(object):
+  """Pack and unpack small tensors into a big one for efficiency."""
+
+  def __init__(self, small_feature_dim_size,
+               minimum_num_small_features_to_group):
+    self._small_feature_dim_size = small_feature_dim_size
+    self._minimum_num_small_features_to_group = (
+        minimum_num_small_features_to_group)
+
+  def maybe_concatenate_features(self, features):
+    """If there are enough small tensors, concat them for performance."""
+    self._small_feature_names = {}
+    self._small_feature_sizes = {}
+    feature_names = _extract_key_names(features)
+    if feature_names:  # Not a single tensor.
+      # First pass: see if it is worth concatenating the small features.
+      for name in feature_names:
+        tensor = features[name]
+        # We do not handle nested inputs here.
+        if not isinstance(tensor, tf.Tensor):
+          return
+        shape = tensor.get_shape().as_list()
+        dtype = tensor.dtype
+        if (len(shape) == 2 and shape[1] is not None and
+            shape[1] <= self._small_feature_dim_size):
+          tf.compat.v1.logging.log_first_n(
+              tf.compat.v1.logging.INFO,
+              'Found small feature: %s %s', 1, name, shape)
+          if tensor.dtype not in self._small_feature_names:
+            self._small_feature_names[dtype] = []
+            self._small_feature_sizes[dtype] = []
+          self._small_feature_names[dtype].append(name)
+          self._small_feature_sizes[dtype].append(shape[1])
+
+      dtypes_ = list(self._small_feature_names.keys())
+      for dtype in dtypes_:
+        # If we find enough small dense features of this dtype (e.g. 5 or
+        # more [batch_size, 1] tensors), we group them.
+        if (len(self._small_feature_names[dtype]) <
+            self._minimum_num_small_features_to_group):
+          self._small_feature_names.pop(dtype)  # reset
+          self._small_feature_sizes.pop(dtype)  # reset
+
+      # Second pass: separate small features out
+      small_feature_tensors = {}
+      for dtype in self._small_feature_names:
+        small_feature_tensors[dtype] = []
+        for name in self._small_feature_names[dtype]:
+          small_feature_tensors[dtype].append(features.pop(name))
+
+      # Add the concat Tensor to features with a special key.
+      for dtype in self._small_feature_names:
+        key = self._get_small_feature_key(dtype)
+        if key in features:
+          raise ValueError('{} is reserved as the feature key for '
+                           'concatenated small features.'.format(key))
+        features[key] = (tf.concat(small_feature_tensors[dtype], axis=1))
+
+  def maybe_split_features(self, maybe_concatenated_features):
+    for dtype in self._small_feature_names:
+      key = self._get_small_feature_key(dtype)
+      concatenated_small_features = maybe_concatenated_features.pop(key)
+      splits = tf.split(
+          concatenated_small_features, self._small_feature_sizes[dtype], axis=1)
+      for name, split in zip(self._small_feature_names[dtype], splits):
+        maybe_concatenated_features[name] = split
+
+  def _get_small_feature_key(self, dtype):
+    return _TENSOR_PACKER_CONCATENATED_SMALL_FEATURES_KEY + '_' + str(dtype)
+
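+# Example (sketch, illustrative values only): packing two small [batch, 1]
+# features into one tensor and splitting them back. The dims below are
+# hypothetical; the module's defaults live in
+# _TENSOR_PACKER_SMALL_FEATURE_DIM_SIZE and
+# _TENSOR_PACKER_MINIMUM_NUM_SMALL_FEATURES_TO_GROUP.
+#
+#   packer = TensorPacker(small_feature_dim_size=8,
+#                         minimum_num_small_features_to_group=2)
+#   features = {'a': tf.ones([32, 1]), 'b': tf.zeros([32, 1])}
+#   packer.maybe_concatenate_features(features)  # may replace 'a' and 'b'
+#                                                # with one concatenated key
+#   packer.maybe_split_features(features)        # restores 'a' and 'b'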
+
+class _InputPipeline(object):
+  """`_InputPipeline` handles invoking `input_fn` and piping to infeed queue.
+
+  `_InputPipeline` abstracts the per-core/per-host `input_fn` invocation from
+  call site.  To be precise, based on the configuration in
+  `_InternalTPUContext`,  it invokes `input_fn` for all cores (usually
+  multi-host TPU training) or for one host (usually for single-host TPU
+  evaluation), and sends all `features` and `labels` returned by `input_fn` to
+  TPU infeed. For per-core invocation, `features` and `labels` are piped to
+  infeed directly, one tuple for each core. For per-host invocation,  `features`
+  and `labels` are split at host (with respect to `batch_axis`) and piped to all
+  cores accordingly.
+
+  In addition, flattening/unflattening is handled by `_InputPipeline`. Model
+  inputs returned by the `input_fn` can have one of the following forms:
+  1. features
+  2. (features, labels)
+  3. ((arbitrarily nested structure of features), labels)
+
+  Internally, form 1 is reformed to `(features, None)` as features and labels
+  are passed separately to underlying methods. For TPU training, TPUEstimator
+  may expect multiple `features` and `labels` tuples, one for each core.
+
+  TPUEstimator allows various structures for inputs (namely `features` and
+  `labels`). Both `features` and `labels` can be any nested structure
+  supported by TF nest (namely, a dict, tuple, namedtuple, or any nested
+  structure of such, of Tensors). `labels` could be `None` as well.
+
+  These are flattened before they are passed to the infeed/outfeed library
+  as that expects flattened lists.
+  """
+
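+  # Example (sketch, not part of the original module): the three accepted
+  # `input_fn` return forms listed above. Tensor names are illustrative.
+  #
+  #   def input_fn_form1(params):   # 1. features only
+  #     return {'x': tf.ones([params['batch_size'], 8])}
+  #
+  #   def input_fn_form2(params):   # 2. (features, labels)
+  #     return {'x': x_tensor}, y_tensor
+  #
+  #   def input_fn_form3(params):   # 3. ((nested features), labels)
+  #     return ({'a': a_tensor, 'b': {'c': c_tensor}}, y_tensor)
+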
+  class InputsStructureRecorder(object):
+    """The recorder to record inputs structure."""
+
+    def __init__(self, input_partition_dims=None):
+      # Holds the structure of inputs
+      self._feature_structure = {}
+      self._flattened_input_dims = None
+
+      if input_partition_dims:
+        # This should have been validated in TPUConfig.
+        assert len(input_partition_dims) <= 2, 'must have 1 or 2 elements.'
+        if len(input_partition_dims) == 2:
+          self._feature_dims, self._label_dims = input_partition_dims
+        else:
+          self._feature_dims = input_partition_dims[0]
+          self._label_dims = None
+
+        assert self._feature_dims is not None, ('input_partition_dims[0] must '
+                                                'not be None')
+      else:
+        self._feature_dims = None
+        self._label_dims = None
+
+      # Internal state.
+      self._initialized = False
+
+    @property
+    def flattened_input_dims(self):
+      assert self._initialized, 'InputsStructureRecorder is not initialized.'
+      return self._flattened_input_dims
+
+    def has_labels(self):
+      return 'labels' in self._feature_structure
+
+    def _flatten_input_dims(self, features, labels, feature_dims, label_dims):
+      """Flatten input dims with the same order as flattened input tensors."""
+
+      try:
+        flattened_input_dims = data_nest.flatten_up_to(features, feature_dims)
+      except TypeError as e:
+        raise ValueError(
+            'TPUConfig.input_partition_dims[0] mismatched the structure of'
+            ' features. input_partition_dims[0]: {}, features {}. {}'.format(
+                feature_dims, features, e))
+
+      if labels is not None:
+        if label_dims is not None:
+          try:
+            flattened_input_dims.extend(
+                data_nest.flatten_up_to(labels, self._label_dims))
+          except TypeError as e:
+            raise ValueError(
+                'TPUConfig.input_partition_dims[1] mismatched the structure of'
+                ' labels. input_partition_dims[1]: {}, labels: {}. {}'.format(
+                    label_dims, labels, e))
+        else:
+          num_label_tensors = len(data_nest.flatten(labels))
+          flattened_input_dims.extend([None] * num_label_tensors)
+      return flattened_input_dims
+
+    def validate_and_record_structure(self, features, labels):
+      """Validates and records the structure of `features` and `labels`."""
+      # Extract structure.
+      feature_names = _extract_key_names(features)
+      label_names = _extract_key_names(labels)
+
+      if not self._initialized:
+        # Record structure.
+        self._initialized = True
+        if self._feature_dims is not None:
+          feature_dims_names = _extract_key_names(self._feature_dims)
+          if feature_dims_names != feature_names:
+            raise ValueError(
+                'TPUConfig.input_partition_dims[0] mismatched feature'
+                ' keys. Expected {}, got {}'.format(feature_names,
+                                                    feature_dims_names))
+          label_dims_names = _extract_key_names(self._label_dims)
+          if self._label_dims is not None and label_dims_names != label_names:
+            raise ValueError(
+                'TPUConfig.input_partition_dims[1] mismatched label'
+                ' keys. Expected {}, got {}'.format(label_names,
+                                                    label_dims_names))
+          self._flattened_input_dims = self._flatten_input_dims(
+              features, labels, self._feature_dims, self._label_dims)
+
+    def flatten_features_and_labels(self, features, labels, signals=None):
+      """Flattens the `features` and `labels` to a single tensor list."""
+      self.tensor_packer = TensorPacker(
+          _TENSOR_PACKER_SMALL_FEATURE_DIM_SIZE,
+          _TENSOR_PACKER_MINIMUM_NUM_SMALL_FEATURES_TO_GROUP)
+      self.tensor_packer.maybe_concatenate_features(features)
+      self._feature_structure['features'] = features
+      if labels is not None:
+        self._feature_structure['labels'] = labels
+      if signals is not None:
+        self._feature_structure['signals'] = signals
+      return data_nest.flatten(self._feature_structure)
+
+    def unflatten_features_and_labels(self, flattened_inputs):
+      """Restores the flattened inputs to original features and labels form.
+
+      Args:
+        flattened_inputs: Flattened inputs for each shard.
+
+      Returns:
+        A tuple of (`features`, `labels`), where `labels` could be None.
+        Each one, if present, should have identical structure (single tensor vs
+        dict) as the one returned by input_fn.
+
+      Raises:
+        ValueError: If the number of expected tensors from `flattened_inputs`
+          mismatches the recorded structure.
+      """
+
+      unflattened_inputs = data_nest.pack_sequence_as(self._feature_structure,
+                                                      flattened_inputs)
+      features = unflattened_inputs['features']
+      self.tensor_packer.maybe_split_features(features)
+      return _Inputs(
+          features,
+          unflattened_inputs.get('labels'),
+          signals=unflattened_inputs.get('signals'))
+
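+  # Example (sketch): the recorder's flatten/unflatten round trip, as used by
+  # the enqueue and dequeue paths in this class. Variables are illustrative.
+  #
+  #   recorder = _InputPipeline.InputsStructureRecorder()
+  #   flat = recorder.flatten_features_and_labels(features, labels)
+  #   inputs = recorder.unflatten_features_and_labels(flat)
+  #   features, labels = inputs.features_and_labels()
+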
+  def __init__(self, input_fn, batch_axis, ctx):
+    """Constructor.
+
+    Args:
+      input_fn: input fn for train or eval.
+      batch_axis: A python tuple of int values describing how each tensor
+        produced by the Estimator `input_fn` should be split across the TPU
+        compute shards.
+      ctx: A `_InternalTPUContext` instance with mode.
+    """
+    self._inputs_structure_recorder = _InputPipeline.InputsStructureRecorder(
+        ctx.input_partition_dims)
+
+    self._sharded_per_core = ctx.is_input_sharded_per_core()
+    self._input_fn = input_fn
+    self._infeed_queue = None
+    self._ctx = ctx
+    self._batch_axis = batch_axis
+
+  def generate_infeed_enqueue_ops_and_dequeue_fn(self):
+    """Generates infeed enqueue ops and dequeue_fn."""
+    # When tf.while_loop is called, the body function, which invokes the
+    # `enqueue_fn` passed in, is called to construct the graph, so the
+    # input_fn structure is recorded at that point.
+    enqueue_ops, all_hooks, run_infeed_loop_on_coordinator = (
+        self._invoke_input_fn_and_record_structure())
+
+    self._validate_input_pipeline()
+
+    def dequeue_fn():
+      """dequeue_fn is used by TPU to retrieve the tensors."""
+      # In the model-parallel case, both the host-side and device-side
+      # computations must agree on the core on which infeed takes place. We
+      # choose to perform infeed on logical core 0 of each replica.
+      values = self._infeed_queue.generate_dequeue_op(tpu_device=0)
+      # The unflatten process uses the structure information recorded above.
+      return self._inputs_structure_recorder.unflatten_features_and_labels(
+          values)
+
+    return (enqueue_ops, dequeue_fn, all_hooks, run_infeed_loop_on_coordinator)
+
+  def _invoke_input_fn_and_record_structure(self):
+    """Deploys the input pipeline and record input structure."""
+    enqueue_ops = []
+    infeed_queues = []
+    all_dataset_initializers = []
+    num_hosts = self._ctx.num_hosts
+    tpu_host_placement_fn = self._ctx.tpu_host_placement_function
+
+    run_infeed_loop_on_coordinator = True
+
+    if self._sharded_per_core:
+      # Per-Core input pipeline deployment.
+      # Invoke the input pipeline for each core, placed on the corresponding
+      # host.
+      for host_id in range(num_hosts):
+        host_device = tpu_host_placement_fn(host_id=host_id)
+        with tf.compat.v1.device(host_device):
+          with ops.name_scope('input_pipeline_task%d' % (host_id)):
+            enqueue_ops_fn, captured_infeed_queue = (
+                generate_per_core_enqueue_ops_fn_for_host(
+                    self._ctx, self._input_fn, self._inputs_structure_recorder,
+                    host_device, host_id))
+
+            if _WRAP_INPUT_FN_INTO_WHILE_LOOP:
+              run_infeed_loop_on_coordinator = False
+              enqueue_ops.append(
+                  _wrap_computation_in_while_loop(
+                      device=host_device, op_fn=enqueue_ops_fn))
+            else:
+              enqueue_ops.append(enqueue_ops_fn())
+            # captured_infeed_queue.get() must be called after enqueue_ops_fn
+            # has been called.
+            infeed_queues.append(captured_infeed_queue.get())
+
+    elif self._ctx.is_input_broadcast_with_iterators():
+      # Only calls input_fn in host 0.
+      host_device = tpu_host_placement_fn(host_id=0)
+      enqueue_ops_fn, captured_infeed_queue, dataset_initializer = (
+          generate_broadcast_enqueue_ops_fn(self._ctx, self._input_fn,
+                                            self._inputs_structure_recorder,
+                                            num_hosts))
+      if dataset_initializer:
+        all_dataset_initializers.append(dataset_initializer)
+        run_infeed_loop_on_coordinator = False
+        wrap_fn = (
+            _wrap_computation_in_while_loop
+            if self._ctx.mode != model_fn_lib.ModeKeys.PREDICT else
+            _wrap_computation_in_while_loop_with_stopping_signals)
+        enqueue_ops.append(wrap_fn(device=host_device, op_fn=enqueue_ops_fn))
+      else:
+        enqueue_ops.append(enqueue_ops_fn())
+      infeed_queues.append(captured_infeed_queue.get())
+
+    else:
+      # This branch handles two scenarios:
+      #       num_cores_per_replica > num_cores_per_host
+      #   and num_cores_per_replica <= num_cores_per_host
+      # First, get the set of host_ids by iterating over the replicas. We
+      # only want the set of *unique* host_ids *that will call input_fn*. For
+      # each replica, we only call the input_fn from the CPU host that
+      # contains logical core 0.
+
+      # Use a list here to ensure deterministic order.
+      host_id_with_invocation_id_pair = []
+
+      if not self._ctx.is_replica_across_hosts():
+        for host_id in range(num_hosts):
+          invocation_index = host_id
+          host_id_with_invocation_id_pair.append((host_id, invocation_index))
+      else:
+        for replica_id in xrange(self._ctx.num_replicas):
+          invocation_index = replica_id
+          host_device, _ = self._ctx.device_for_replica(replica_id)
+          # TODO(lehou): Get host_id in a better way.
+          host_id = int(host_device.split('/task:')[1].split('/device:')[0])
+          host_id_with_invocation_id_pair.append((host_id, invocation_index))
+
+      for (host_id, invocation_index) in host_id_with_invocation_id_pair:
+        host_device = tpu_host_placement_fn(host_id=host_id)
+        with tf.compat.v1.device(host_device):
+          with ops.name_scope('input_pipeline_task%d' % (host_id)):
+            if self._ctx.is_input_per_host_with_iterators():
+              enqueue_ops_fn, captured_infeed_queue, dataset_initializer = (
+                  generate_per_host_v2_enqueue_ops_fn_for_host(
+                      self._ctx, self._input_fn,
+                      self._inputs_structure_recorder, host_device, host_id,
+                      invocation_index))
+            else:
+              enqueue_ops_fn, captured_infeed_queue, dataset_initializer = (
+                  generate_per_host_enqueue_ops_fn_for_host(
+                      self._ctx, self._input_fn,
+                      self._inputs_structure_recorder, self._batch_axis,
+                      host_device, host_id))
+
+            # NOTE(xiejw): We dispatch here based on the return type of the
+            # users `input_fn`.
+            #
+            # 1. If input_fn returns a Dataset instance, we initialize the
+            # iterator outside of tf.while_loop, and call the iterator.get_next
+            # inside tf.while_loop.  This should always be safe.
+            #
+            # 2. If input_fn returns (features, labels), it is too late to wrap
+            # them inside tf.while_loop, as resource initialization cannot be
+            # handled properly in TF control flow. In this case, we use a
+            # Python loop to enqueue the data into the TPU system.  This may be
+            # slow compared to the previous case.
+            if dataset_initializer:
+              all_dataset_initializers.append(dataset_initializer)
+              run_infeed_loop_on_coordinator = False
+              wrap_fn = (
+                  _wrap_computation_in_while_loop
+                  if self._ctx.mode != model_fn_lib.ModeKeys.PREDICT else
+                  _wrap_computation_in_while_loop_with_stopping_signals)
+              enqueue_ops.append(
+                  wrap_fn(device=host_device, op_fn=enqueue_ops_fn))
+            else:
+              enqueue_ops.append(enqueue_ops_fn())
+            infeed_queues.append(captured_infeed_queue.get())
+
+    # infeed_queue is used to generate dequeue ops. The only thing it needs
+    # for dequeue is the dtypes and shapes, so any one of them can be used.
+    # Here, grab the first one.
+    self._infeed_queue = infeed_queues[0]
+    return enqueue_ops, [
+        util_lib.MultiHostDatasetInitializerHook(all_dataset_initializers)
+    ], run_infeed_loop_on_coordinator
+
+  def _validate_input_pipeline(self):
+    """Validates the input pipeline.
+
+    Performs some sanity checks and logs user-friendly information. Ideally
+    we would error out to give users a better error message, but if
+    _WRAP_INPUT_FN_INTO_WHILE_LOOP is False (legacy behavior), we cannot
+    break user code, so we only log a warning.
+
+    Raises:
+      RuntimeError: If the validation failed.
+    """
+    if tf.compat.v1.get_default_graph().get_collection(
+        tf.compat.v1.GraphKeys.QUEUE_RUNNERS):
+      err_msg = ('Input pipeline contains one or more QueueRunners. '
+                 'It could be slow and not scalable. Please consider '
+                 'converting your input pipeline to use `tf.data` instead (see '
+                 'https://www.tensorflow.org/guide/datasets for '
+                 'instructions).')
+      if _WRAP_INPUT_FN_INTO_WHILE_LOOP:
+        raise RuntimeError(err_msg)
+      else:
+        logging.warn(err_msg)
+
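+# Example (sketch): a QueueRunner-free input pipeline using `tf.data`, which
+# the validation above recommends. `files` and `parse_fn` are placeholders.
+#
+#   def input_fn(params):
+#     ds = tf.data.TFRecordDataset(files).map(parse_fn)
+#     return ds.batch(params['batch_size'], drop_remainder=True)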
+
+def call_computation(computation_inputs, computation, batch_config=None):
+  """Call computation.
+
+  Args:
+    computation_inputs: A tensor or dict of tensors, the inputs to the
+      computation.
+    computation: A Python function that takes the computation inputs and
+      builds the computation graph. If `computation` returns m outputs, this
+      function will return a list of m Tensors.
+    batch_config: A BatchConfig named tuple specifying the batching
+      configuration to use for inference batching.
+
+  Returns:
+    A list of output tensors.
+  """
+
+  # Using `TPUPartitionedCall` makes it possible to target a different
+  # TPU core with every `Session.run()` call. Note that the entire inference
+  # graph executes on a single core, and that invocations of this graph
+  # will round-robin among the cores attached to a host.
+  def tpu_partitioned_call(partition_inputs):
+
+    # capture_resource_var_by_value enables variables to be mirrored on TPU
+    # to avoid fetching from CPU, since variables do not change during
+    # inference.
+    @function.Defun(capture_resource_var_by_value=False)
+    def tpu_subgraph():
+      return computation(partition_inputs)
+
+    return tpu_functional.TPUPartitionedCall(
+        args=tpu_subgraph.captured_inputs,
+        device_ordinal=tpu_ops.tpu_ordinal_selector(),
+        Tout=[o.type for o in tpu_subgraph.definition.signature.output_arg],
+        f=tpu_subgraph)
+
+  # Not using the batching function; use TPUPartitionedCall across all cores.
+  if not batch_config:
+    return tpu_partitioned_call(computation_inputs)
+
+  # Use the batching function and TPUPartitionedCall across all cores.
+  # Note that BatchingFunction requires a list of tensors and doesn't support
+  # a dict of tensors. So we preserve the structure by deterministically
+  # flattening the dict before batching and then recomposing it after batching
+  # to feed into the computation.
+  ordered_inputs_list = tf.nest.flatten(computation_inputs)
+
+  @tf.nondifferentiable_batch_function(
+      num_batch_threads=batch_config.num_batch_threads,
+      max_batch_size=batch_config.max_batch_size,
+      batch_timeout_micros=batch_config.batch_timeout_micros,
+      allowed_batch_sizes=batch_config.allowed_batch_sizes,
+      max_enqueued_batches=batch_config.max_enqueued_batches,
+      autograph=False)
+  def batched_tpu_computation(*tensor_args):
+    """Recompose the input feature dict and calls the TPU computation."""
+    computation_feature_input = tf.nest.pack_sequence_as(
+        computation_inputs, tensor_args)
+    return tpu_partitioned_call(computation_feature_input)
+
+  return batched_tpu_computation(*ordered_inputs_list)
+
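+# Example (sketch): invoking `call_computation` above with a batching
+# configuration. `BatchConfig` is the named tuple referenced in the docstring;
+# the field names follow the attributes consumed above, and the values here
+# are purely illustrative.
+#
+#   batch_config = BatchConfig(
+#       num_batch_threads=2, max_batch_size=128, batch_timeout_micros=5000,
+#       allowed_batch_sizes=[32, 64, 128], max_enqueued_batches=10)
+#   outputs = call_computation(features, tpu_computation,
+#                              batch_config=batch_config)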
+
+class _ModelFnWrapper(object):
+  """A `model_fn` wrapper.
+
+  This makes calling model_fn on CPU and TPU easier and more consistent and
+  performs necessary check and mutation required by TPU training and evaluation.
+
+  In addition, this wrapper manages converting the `model_fn` to a single TPU
+  train and eval step.
+  """
+
+  def __init__(self, model_fn, config, params, ctx):
+    self._model_fn = model_fn
+    self._config = config
+    self._params = params
+    self._ctx = ctx
+
+  def call_without_tpu(self, features, labels, is_export_mode):
+    return self._call_model_fn(features, labels, is_export_mode=is_export_mode)
+
+  def _add_embedding_features(self, features, hook_dummy_table_variables):
+    """Add embedding features, optionally add hook to intercept gradient."""
+    if self._ctx.embedding_config:
+      tpu_embedding_ = self._ctx.embedding_config.tpu_embedding
+      embedding_activations = tpu_embedding_.get_activations()
+      if hook_dummy_table_variables:
+        new_embedding_activations = (
+            tpu_embedding_gradient.hook_dummy_table_variables_to_activations(
+                tpu_embedding_, embedding_activations,
+                self._ctx.embedding_config.dummy_table_variables))
+        features.update(new_embedding_activations)
+      else:
+        features.update(embedding_activations)
+
+  def convert_to_single_tpu_train_step(self, dequeue_fn):
+    """Converts user provided model_fn` as a single train step on TPU.
+
+    The user provided `model_fn` takes input tuple
+    (features, labels) and produces the EstimatorSpec with train_op and loss for
+    train `mode`. This usually represents a single train computation on CPU.
+
+    For TPU training, a train (computation) step is first wrapped in a
+    tf.while_loop control flow so it repeats many times, and is then
+    replicated to all TPU shards. In addition, the input should be taken from
+    the TPU infeed rather than from the input pipeline (input_fn) directly.
+    To fit this loop-and-replicate pattern, the original train computation is
+    reformed into the returned `train_step`.
+
+    Args:
+      dequeue_fn: The function to retrieve inputs, features and labels, from TPU
+        infeed dequeue channel.
+
+    Returns:
+      A tuple of train_fn, host_call, captured scaffold_fn, and captured
+      training hooks. The train_fn represents the train step for TPU.
+    """
+
+    host_call = _OutfeedHostCall(
+        self._ctx,
+        outfeed_every_n_steps=self._config.tpu_config
+        .experimental_host_call_every_n_steps)
+    captured_scaffold_fn = _CapturedObject()
+    captured_training_hooks = _CapturedObject()
+
+    def train_step(step):
+      """Training step function for use inside a while loop."""
+      inputs = dequeue_fn()
+      features, labels = inputs.features_and_labels()
+      self._add_embedding_features(features, True)
+
+      estimator_spec = self._verify_estimator_spec(
+          self._call_model_fn(features, labels))
+      loss, train_op = estimator_spec.loss, estimator_spec.train_op
+
+      if tensor_tracer.TensorTracer.is_enabled():
+        tt = tensor_tracer.TensorTracer()
+        loss = tt.trace_tpu(tf.compat.v1.get_default_graph(), loss, train_op,
+                            self._ctx.num_replicas)
+        tracer_host_call = tt.host_call_deps_and_fn()
+      else:
+        tracer_host_call = {}
+
+      if isinstance(estimator_spec, model_fn_lib._TPUEstimatorSpec):  # pylint: disable=protected-access
+        captured_scaffold_fn.capture(estimator_spec.scaffold_fn)
+      else:
+        captured_scaffold_fn.capture(None)
+
+      captured_training_hooks.capture(estimator_spec.training_hooks)
+
+      if self._ctx.embedding_config is None:
+        apply_sparse_grads = []
+      else:
+        tpu_embedding_ = self._ctx.embedding_config.tpu_embedding
+        gradients = (
+            tpu_embedding_gradient.get_gradients_through_dummy_table_variables(
+                tpu_embedding_))
+        grad_multiplier = self._ctx.embedding_config.get_grad_multiplier()
+        if grad_multiplier is not None:
+          scaled_gradients = collections.OrderedDict(
+              (k, v * grad_multiplier) for k, v in six.iteritems(gradients))
+        else:
+          scaled_gradients = gradients
+        apply_sparse_grads = [
+            tpu_embedding_.generate_send_gradients_op(
+                scaled_gradients, tf.compat.v1.train.get_global_step())
+        ]
+
+      stopping_signals = None
+      user_provided_stopping_signals_name = None
+      if self._ctx.feed_hook is not None:
+        stopping_signals, user_provided_stopping_signals_name = \
+          self._ctx.feed_hook.get_stopping_signals_and_name(features)
+
+      # We must run train_op to update the variables prior to running the
+      # outfeed.
+      with tf.control_dependencies([train_op] + apply_sparse_grads):
+        host_call_outfeed_ops = []
+        host_call_fn, host_call_args = None, []
+
+        if (isinstance(estimator_spec, model_fn_lib._TPUEstimatorSpec)  # pylint: disable=protected-access
+            and estimator_spec.host_call is not None):
+          host_call_fn, host_call_args = estimator_spec.host_call
+
+        if stopping_signals is not None:
+          identity_fn = lambda **kwargs: kwargs
+          tracer_host_call[user_provided_stopping_signals_name] = [
+              identity_fn, stopping_signals
+          ]
+
+        if host_call_fn:
+          # Ignore dummy hostcalls (no arguments)
+          if host_call_args:
+            tracer_host_call.update({'host_call': estimator_spec.host_call})
+            host_call.record(tracer_host_call)
+            host_call_outfeed_ops = host_call.create_enqueue_op(step)
+          elif tracer_host_call:
+            host_call.record(tracer_host_call)
+            host_call_outfeed_ops = host_call.create_enqueue_op(step)
+        else:
+          # Create a host call for the loss to track execution progress.
+          # Without this, we don't have any indication of the state of the
+          # TPU program.
+          tracer_host_call.update(
+              {'host_call': (lambda loss_t: loss_t, [tf.reshape(loss, [1])])})
+          host_call.record(tracer_host_call)
+          host_call_outfeed_ops = host_call.create_enqueue_op(step)
+
+        with tf.control_dependencies(host_call_outfeed_ops):
+          return tf.identity(loss)
+
+    return (train_step, host_call, captured_scaffold_fn,
+            captured_training_hooks)
+
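+  # Example (sketch): a `host_call` as recorded by the train step above. The
+  # summary-writing body is illustrative; any function over the outfed
+  # tensors works, and the tensor names are placeholders.
+  #
+  #   def host_call_fn(global_step, loss):
+  #     # Tensors arrive batched across replicas; reduce as needed.
+  #     return tf.compat.v1.summary.scalar('loss', tf.reduce_mean(loss))
+  #   host_call = (host_call_fn, [global_step_tensor, loss_tensor])
+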
+  def convert_to_single_tpu_eval_step(self, dequeue_fn):
+    """Converts user provided model_fn` as a single eval step on TPU.
+
+    Similar to training, the user provided `model_fn` takes input tuple
+    (features, labels) and produces the TPUEstimatorSpec with eval_metrics for
+    eval `mode`. This usually represents a single evaluation computation on CPU.
+
+    For TPU evaluation, an eval (computation) step is first wrapped in a
+    tf.while_loop control flow so it repeats many times, and is then
+    replicated to all TPU shards. In addition, the input and output are
+    slightly different. The input, features and labels, should be taken from
+    the TPU infeed rather than from the input pipeline (input_fn) directly.
+    The output is managed in two stages. First, the model outputs, as the
+    result of the evaluation computation (usually model logits), are
+    transferred from the TPU system to the CPU. Then, all model outputs are
+    concatenated on the CPU and sent to the metric_fn for metrics
+    computation. To fit this TPU evaluation pattern, the original eval
+    computation is reformed into the returned `eval_step`.
+
+    Args:
+      dequeue_fn: The function to retrieve inputs, features and labels, from TPU
+        infeed dequeue channel.
+
+    Returns:
+      A tuple of eval_fn, host_calls, captured scaffold_fn, and captured eval
+      hooks. The eval_fn represents the eval step for TPU.
+    """
+    host_calls = _OutfeedHostCall(self._ctx)
+    captured_scaffold_fn = _CapturedObject()
+    captured_eval_hooks = _CapturedObject()
+
+    def eval_step(total_loss):
+      """Evaluation step function for use inside a while loop."""
+      inputs = dequeue_fn()
+      features, labels = inputs.features_and_labels()
+      self._add_embedding_features(features, False)
+
+      tpu_estimator_spec = self._call_model_fn(features, labels)
+      if not isinstance(tpu_estimator_spec, model_fn_lib._TPUEstimatorSpec):  # pylint: disable=protected-access
+        raise RuntimeError(
+            'estimator_spec used by TPU evaluation must have type'
+            '`TPUEstimatorSpec`. Got {}'.format(type(tpu_estimator_spec)))
+
+      loss = tpu_estimator_spec.loss
+      captured_scaffold_fn.capture(tpu_estimator_spec.scaffold_fn)
+      captured_eval_hooks.capture(tpu_estimator_spec.evaluation_hooks)
+
+      to_record = {}
+      if tpu_estimator_spec.eval_metrics:
+        to_record['eval_metrics'] = tpu_estimator_spec.eval_metrics
+      if tpu_estimator_spec.host_call is not None:
+        # We assume that evaluate won't update global step, so we don't wrap
+        # this host_call.
+        to_record['host_call'] = tpu_estimator_spec.host_call
+      host_calls.record(to_record)
+
+      with tf.control_dependencies(host_calls.create_enqueue_op()):
+        return tf.math.add(total_loss, loss)
+
+    return eval_step, host_calls, captured_scaffold_fn, captured_eval_hooks
+
+  def convert_to_single_tpu_predict_step(self, dequeue_fn):
+    """Converts user provided model_fn` as a single predict step on TPU.
+
+    Args:
+      dequeue_fn: The function to retrieve inputs, features and labels, from TPU
+        infeed dequeue channel.
+
+    Returns:
+      A tuple of predict_fn, host_calls, captured scaffold_fn, and captured
+      predict hooks. The predict_fn represents the predict step for TPU.
+    """
+    host_calls = _OutfeedHostCall(self._ctx)
+    captured_scaffold_fn = _CapturedObject()
+    captured_predict_hooks = _CapturedObject()
+
+    def predict_step(unused_scalar_stopping_signal):
+      """Evaluation step function for use inside a while loop."""
+      inputs = dequeue_fn()
+      features, labels = inputs.features_and_labels()
+      stopping_signals = inputs.signals()
+
+      assert stopping_signals is not None, (
+          'Internal Error: `signals` is missing.')
+
+      tpu_estimator_spec = self._call_model_fn(
+          features, labels, is_export_mode=False)
+      if not isinstance(tpu_estimator_spec, model_fn_lib._TPUEstimatorSpec):  # pylint: disable=protected-access
+        raise RuntimeError(
+            'estimator_spec used by TPU prediction must have type'
+            '`TPUEstimatorSpec`. Got {}'.format(type(tpu_estimator_spec)))
+
+      self._verify_tpu_spec_predictions(tpu_estimator_spec.predictions)
+
+      captured_scaffold_fn.capture(tpu_estimator_spec.scaffold_fn)
+      captured_predict_hooks.capture(tpu_estimator_spec.prediction_hooks)
+      to_record = {}
+      identity_fn = lambda **kwargs: kwargs
+      to_record['predictions'] = [identity_fn, tpu_estimator_spec.predictions]
+      to_record['signals'] = [identity_fn, stopping_signals]
+      if tpu_estimator_spec.host_call is not None:
+        to_record['host_call'] = tpu_estimator_spec.host_call
+      host_calls.record(to_record)
+
+      with tf.control_dependencies(host_calls.create_enqueue_op()):
+        return _StopSignals.as_scalar_stopping_signal(stopping_signals)
+
+    return (predict_step, host_calls, captured_scaffold_fn,
+            captured_predict_hooks)
+
+  def _verify_tpu_spec_predictions(self, predictions):
+    """Validates TPUEstimatorSpec.predictions dict."""
+    # TODO(xiejw): Adds validation for the prediction dictionary.
+    # TODO(xiejw): Adds support for single tensor as predictions.
+    if not isinstance(predictions, dict):
+      raise TypeError('TPUEstimatorSpec.predictions must be dict of Tensors.')
+
+    for (key, tensor) in predictions.items():
+      if tensor.shape.dims[0].value is None:
+        raise ValueError(
+            'The tensor with key ({}) in TPUEstimatorSpec.predictions has '
+            'dynamic shape (should be static). Tensor: {}'.format(key, tensor))
+    return predictions
+
+  def _validate_model_features_and_labels(self, features, labels,
+                                          is_export_mode):
+    """Validates that the features and labels for the model function are valid.
+
+    A valid features/labels object is the one with:
+    - Type: A tensor or any nested structure of tensors supported by TF nest,
+        namely nested dictionary, tuple, namedtuple, or sequence of tensors.
+    - Static shape if is_export_mode is False.
+
+    Args:
+      features: the features that would be input to the model function.
+      labels: the labels that would be input to the model function.
+      is_export_mode: boolean value specifying if in export mode.
+
+    Raises:
+      TypeError: If features/labels are not of the correct type.
+      ValueError: If features/labels have dynamic shape.
+    """
+
+    def validate(obj, obj_name):
+      """Helper validate function."""
+      if is_export_mode or self._ctx.is_running_on_cpu(is_export_mode):
+        return
+      if isinstance(obj, tf.Tensor):
+        if not obj.get_shape().is_fully_defined():
+          raise ValueError(
+              'The {} to the model returned by input_fn must have static shape.'
+              ' Tensor: {}'.format(obj_name, obj))
+      else:
+        for tensor in data_nest.flatten(obj):
+          if not tensor.get_shape().is_fully_defined():
+            raise ValueError(
+                ('The {} to the model returned by input_fn must have static '
+                 'shape. Tensor: {}').format(obj_name, tensor))
+
+    validate(features, 'features')
+    if labels is not None:
+      validate(labels, 'labels')
+
+  def _call_model_fn(self, features, labels, is_export_mode=False):
+    """Calls the model_fn with required parameters."""
+    self._validate_model_features_and_labels(features, labels, is_export_mode)
+    model_fn_args = function_utils.fn_args(self._model_fn)
+    kwargs = {}
+
+    # Makes deep copies of `config` and `params` in case the user mutates them.
+    config = copy.deepcopy(self._config)
+    params = copy.deepcopy(self._params)
+
+    if 'labels' in model_fn_args:
+      kwargs['labels'] = labels
+    elif labels is not None:
+      raise ValueError(
+          'model_fn does not take labels, but input_fn returns labels.')
+    if 'mode' in model_fn_args:
+      kwargs['mode'] = self._ctx.mode
+    if 'config' in model_fn_args:
+      kwargs['config'] = config
+    if 'params' in model_fn_args:
+      kwargs['params'] = params
+
+    if 'params' not in model_fn_args:
+      raise ValueError('model_fn ({}) does not include params argument, '
+                       'required by TPUEstimator to pass batch size as '
+                       'params[\'batch_size\']'.format(self._model_fn))
+
+    if is_export_mode:
+      batch_size_for_model_fn = None
+    else:
+      batch_size_for_model_fn = self._ctx.batch_size_for_model_fn
+
+    if batch_size_for_model_fn is not None:
+      _add_item_to_params(params, _BATCH_SIZE_KEY, batch_size_for_model_fn)
+
+    running_on_cpu = self._ctx.is_running_on_cpu(is_export_mode)
+    # In export mode, params['use_tpu'] has already been set based on mode
+    # (i.e. True for _REWRITE_FOR_INFERENCE_MODE, False otherwise).
+    if not is_export_mode:
+      _add_item_to_params(params, _USE_TPU_KEY, not running_on_cpu)
+
+    if not running_on_cpu:
+      user_context = tpu_context.TPUContext(
+          internal_ctx=self._ctx, call_from_input_fn=False)
+      _add_item_to_params(params, _CTX_KEY, user_context)
+
+    estimator_spec = self._model_fn(features=features, **kwargs)
+    if (running_on_cpu and
+        isinstance(estimator_spec, model_fn_lib._TPUEstimatorSpec)):  # pylint: disable=protected-access
+      # The estimator_spec will be passed to `Estimator` directly, which expects
+      # type `EstimatorSpec`. As we are running on the CPU, escape
+      # the TPUInferenceContext.
+      graph_context = tf.compat.v1.get_default_graph(
+      )._get_control_flow_context()
+      try:
+        if isinstance(graph_context, tpu._TPUInferenceContext):
+          tf.compat.v1.get_default_graph()._set_control_flow_context(
+              graph_context.outer_context)
+        return estimator_spec.as_estimator_spec()
+      finally:
+        tf.compat.v1.get_default_graph()._set_control_flow_context(
+            graph_context)
+    else:
+      return estimator_spec
+
+  def _verify_estimator_spec(self, estimator_spec):
+    """Validates the estimator_spec."""
+    if isinstance(estimator_spec, model_fn_lib._TPUEstimatorSpec):  # pylint: disable=protected-access
+      return estimator_spec
+
+    err_msg = '{} returned by EstimatorSpec is not supported in TPUEstimator.'
+    if estimator_spec.training_chief_hooks:
+      raise ValueError(
+          err_msg.format('training_chief_hooks') + 'If you want' +
+          ' to pass training hooks, please pass via training_hooks.')
+
+    if estimator_spec.scaffold:
+      tf.compat.v1.logging.warn(
+          'EstimatorSpec.Scaffold is ignored by TPU train/eval. '
+          'Please use TPUEstimatorSpec.')
+    return estimator_spec
+
+
+class _OutfeedHostCall(object):
+  """Support for `eval_metrics` and `host_call` in TPUEstimatorSpec."""
+
+  def __init__(self, ctx, outfeed_every_n_steps=1):
+    self._ctx = ctx
+    self._names = []
+    # All of these are dictionaries of lists keyed on the name.
+    self._host_fns = {}
+    self._tensor_keys = collections.defaultdict(list)
+    self._tensors = collections.defaultdict(list)
+    self._tensor_dtypes = collections.defaultdict(list)
+    self._tensor_shapes = collections.defaultdict(list)
+    self._outfeed_every_n_steps = outfeed_every_n_steps
+
+  @staticmethod
+  def validate(host_calls):
+    """Validates the `eval_metrics` and `host_call` in `TPUEstimatorSpec`."""
+
+    for name, host_call in host_calls.items():
+      if not isinstance(host_call, (tuple, list)):
+        raise ValueError('{} should be tuple or list'.format(name))
+      if len(host_call) != 2:
+        raise ValueError('{} should have two elements.'.format(name))
+      if not callable(host_call[0]):
+        raise TypeError('{}[0] should be callable.'.format(name))
+      if not isinstance(host_call[1], (tuple, list, dict)):
+        raise ValueError('{}[1] should be tuple or list, or dict.'.format(name))
+
+      if isinstance(host_call[1], (tuple, list)):
+        fullargspec = tf_inspect.getfullargspec(host_call[0])
+        fn_args = function_utils.fn_args(host_call[0])
+        # wrapped_hostcall_with_global_step uses varargs, so we allow that.
+        if fullargspec.varargs is None and len(host_call[1]) != len(fn_args):
+          raise RuntimeError(
+              'In TPUEstimatorSpec.{}, length of tensors {} does not match '
+              'method args of the function, which takes {}.'.format(
+                  name, len(host_call[1]), len(fn_args)))
+
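+  # Example (sketch) of a `host_calls` dict that passes `validate`: each value
+  # is a (fn, tensors) pair whose tensors match the fn's signature. Names are
+  # illustrative.
+  #
+  #   host_calls = {
+  #       'host_call': (lambda loss: loss, [loss_tensor]),
+  #       'eval_metrics': (metric_fn, {'labels': labels, 'preds': preds}),
+  #   }
+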
+  @staticmethod
+  def create_cpu_hostcall(host_calls):
+    """Runs on the host_call on CPU instead of TPU when use_tpu=False."""
+
+    _OutfeedHostCall.validate(host_calls)
+    ret = {}
+    for name, host_call in host_calls.items():
+      host_fn, tensors = host_call
+      if isinstance(tensors, (tuple, list)):
+        ret[name] = host_fn(*tensors)
+      else:
+        # Must be dict.
+        try:
+          ret[name] = host_fn(**tensors)
+        except TypeError as e:
+          tf.compat.v1.logging.warn(
+              'Exception while calling %s: %s. It is likely the tensors '
+              '(%s[1]) do not match the '
+              'function\'s arguments', name, e, name)
+          raise
+    return ret
+
+  def record(self, host_calls):
+    """Records the host_call structure."""
+
+    for name, host_call in host_calls.items():
+      host_fn, tensor_list_or_dict = host_call
+      self._names.append(name)
+      self._host_fns[name] = host_fn
+
+      if isinstance(tensor_list_or_dict, dict):
+        for (key, tensor) in six.iteritems(tensor_list_or_dict):
+          self._tensor_keys[name].append(key)
+          self._tensors[name].append(tensor)
+          self._tensor_dtypes[name].append(tensor.dtype)
+          self._tensor_shapes[name].append(tensor.shape)
+      else:
+        # List or tuple.
+        self._tensor_keys[name] = None
+        for tensor in tensor_list_or_dict:
+          self._tensors[name].append(tensor)
+          self._tensor_dtypes[name].append(tensor.dtype)
+          self._tensor_shapes[name].append(tensor.shape)
+
+  def create_enqueue_op(self, step=None):
+    """Create the op to enqueue the recorded host_calls.
+
+    Returns:
+      A list of enqueue ops, which is empty if there are no host calls.
+    """
+    if not self._names:
+      return []
+
+    tensors = []
+    # TODO(jhseu): Consider deduping tensors.
+    for name in self._names:
+      tensors.extend(self._tensors[name])
+
+    if self._outfeed_every_n_steps > 1 and step is None:
+      raise ValueError('If outfeed is requested every n steps, you must pass '
+                       'a tensor whose value is the step number within the '
+                       'current training loop.')
+    with tf.compat.v1.device(tf.compat.v1.tpu.core(0)):
+      if self._outfeed_every_n_steps == 1:
+        return [tpu_ops.outfeed_enqueue_tuple(tensors)]
+      else:
+        return [
+            tf.compat.v1.cond(
+                tf.math.equal(
+                    tf.math.floormod(step, self._outfeed_every_n_steps),
+                    0), lambda: tpu_ops.outfeed_enqueue_tuple(tensors),
+                lambda: tf.no_op())
+        ]
+
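+  # Example (sketch): with outfeed_every_n_steps=2, the enqueue op produced
+  # above only fires when the loop step is a multiple of two. Names are
+  # illustrative.
+  #
+  #   host_call = _OutfeedHostCall(ctx, outfeed_every_n_steps=2)
+  #   host_call.record({'host_call': (fn, [loss_tensor])})
+  #   enqueue_ops = host_call.create_enqueue_op(step=step_tensor)
+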
+  def create_tpu_hostcall(self):
+    """Sends the tensors through outfeed and runs the host_fn on CPU.
+
+    The tensors are concatenated along dimension 0 to form a global tensor
+    across all shards. The concatenated tensors are passed to the host_fn,
+    which is executed on the first host.
+
+    Returns:
+      A dictionary mapping name to the return type of the host_call by that
+      name.
+
+    Raises:
+      RuntimeError: If an outfeed tensor is a scalar.
+    """
+    if not self._names:
+      return {}
+
+    ret = {}
+    # For each i, dequeue_ops[i] is a list containing the tensors from all
+    # shards. This list is concatenated later.
+    dequeue_ops = []
+    tensor_dtypes = []
+    tensor_shapes = []
+    for name in self._names:
+      for _ in self._tensors[name]:
+        dequeue_ops.append([])
+      for dtype in self._tensor_dtypes[name]:
+        tensor_dtypes.append(dtype)
+      for shape in self._tensor_shapes[name]:
+        tensor_shapes.append(shape)
+
+    # Outfeed ops execute on each replica's first logical core. Note: we must
+    # constrain it so that we have at most one outfeed dequeue and enqueue
+    # per replica.
+    for i in xrange(self._ctx.num_replicas):
+      host_device, ordinal_id = self._ctx.device_for_replica(i)
+      with tf.compat.v1.device(host_device):
+        outfeed_tensors = tpu_ops.outfeed_dequeue_tuple(
+            dtypes=tensor_dtypes,
+            shapes=tensor_shapes,
+            device_ordinal=ordinal_id)
+        for j, item in enumerate(outfeed_tensors):
+          dequeue_ops[j].append(item)
+
+    # Deconstruct dequeue ops.
+    flat_dequeue_ops = []
+    for l in dequeue_ops:
+      flat_dequeue_ops.extend(l)
+
+    dequeue_ops_by_name = {}
+    pos = 0
+    for name in self._names:
+      dequeue_ops_by_name[name] = dequeue_ops[pos:pos +
+                                              len(self._tensors[name])]
+      pos += len(self._tensors[name])
+
+    def _call_host_fn(fn, *args, **kw):
+      context = CatchInvalidHostcallFunctions()
+      context.Enter()
+      result = fn(*args, **kw)
+      context.Exit()
+      context.ExitResult(result)
+      return result
+
+    # It is assumed that evaluation always happens on a single-host TPU
+    # system, so place all ops on the TPU host if possible.
+    #
+    # TODO(jhseu): Evaluate whether this is right for summaries.
+    with tf.compat.v1.device(
+        self._ctx.tpu_host_placement_function(replica_id=0)):
+      for name in self._names:
+        dequeue_ops = dequeue_ops_by_name[name]
+        for i, item in enumerate(dequeue_ops):
+          # TODO(xiejw): Make the specification of the outfeed combination
+          # function more explicit and well-documented.  We may want to give the
+          # user the option of concatenating along any axis.
+          if (self._ctx.config.tpu_config.per_host_input_for_training is
+              tpu_config.InputPipelineConfig.BROADCAST):
+            # If the infeed is in BROADCAST mode (each core receiving the same
+            # input), then we assume that the cores also produce identical
+            # copies of the same output, and we simply take the output from
+            # the first core.  This mode is used by Mesh-TensorFlow.
+            with tf.control_dependencies(dequeue_ops[i]):
+              dequeue_ops[i] = tf.identity(dequeue_ops[i][0])
+          else:
+            if dequeue_ops[i][0].shape.ndims == 0:
+              raise RuntimeError(
+                  'All tensors outfed from TPU should preserve batch size '
+                  'dimension, but got scalar {}'.format(dequeue_ops[i][0]))
+            # Assume that the input has been batch-split and that axis 0 of the
+            # output tensors represents the batch size.  Concatenate along
+            # the axis 0 to re-combine the batch.
+            dequeue_ops[i] = tf.concat(dequeue_ops[i], axis=0)
+
+        if self._tensor_keys[name] is not None:
+          # The user-provided eval_metrics[1] is a dict.
+          dequeue_ops = dict(zip(self._tensor_keys[name], dequeue_ops))
+          try:
+            ret[name] = _call_host_fn(self._host_fns[name], **dequeue_ops)
+          except TypeError as e:
+            tf.compat.v1.logging.warn(
+                'Exception while calling %s: %s. It is likely the tensors '
+                '(%s[1]) do not match the '
+                'function\'s arguments', name, e, name)
+            raise
+        else:
+          ret[name] = _call_host_fn(self._host_fns[name], *dequeue_ops)
+
+    # Force all dequeue operations to be run if not consumed by the host calls.
+    ret['__force_dequeue'] = tf.group(*flat_dequeue_ops)
+    return ret
+
+
+class _OutfeedHostCallHook(tf.compat.v1.train.SessionRunHook):
+  """Hook to run host calls when use_tpu=False."""
+
+  def __init__(self, tensors):
+    self._tensors = tensors
+
+  def begin(self):
+    # We duplicate this code from the TPUInfeedOutfeedSessionHook rather than
+    # create a separate hook to guarantee execution order, because summaries
+    # need to be initialized before the outfeed thread starts.
+    # TODO(jhseu): Make a wrapper hook instead?
+    self._init_ops = contrib_summary.summary_writer_initializer_op()
+    # Get all the writer resources from the initializer, so we know what to
+    # flush.
+    self._finalize_ops = []
+    for op in self._init_ops:
+      self._finalize_ops.append(contrib_summary.flush(writer=op.inputs[0]))
+
+  def after_create_session(self, session, coord):
+    session.run(self._init_ops)
+
+  def before_run(self, run_context):
+    return tf.compat.v1.train.SessionRunArgs(self._tensors)
+
+  def end(self, session):
+    session.run(self._finalize_ops)
+
+
+class _NotSaver(object):
+  """What to pass instead of a saver object if you don't want saving."""
+
+  def __init__(self, message):
+    self._message = message
+
+  def save(self, *args, **kwargs):
+    del args, kwargs
+    tf.compat.v1.logging.info(self._message)
+
+
+class ExamplesPerSecondHook(tf.compat.v1.train.StepCounterHook):
+  """Calculate and report global_step/sec and examples/sec during runtime."""
+
+  def __init__(self,
+               batch_size,
+               every_n_steps=100,
+               every_n_secs=None,
+               output_dir=None,
+               summary_writer=None):
+    self._batch_size = batch_size
+    super(ExamplesPerSecondHook, self).__init__(
+        every_n_steps=every_n_steps,
+        every_n_secs=every_n_secs,
+        output_dir=output_dir,
+        summary_writer=summary_writer)
+
+  def _log_and_record(self, elapsed_steps, elapsed_time, global_step):
+    global_step_per_sec = elapsed_steps / elapsed_time
+    examples_per_sec = self._batch_size * global_step_per_sec
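+    # For example (illustrative numbers): batch_size=1024 at 2.5 global
+    # steps/sec yields 1024 * 2.5 = 2560 examples/sec.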
+    if self._summary_writer is not None:
+      global_step_summary = Summary(value=[
+          Summary.Value(
+              tag='global_step/sec', simple_value=global_step_per_sec)
+      ])
+      example_summary = Summary(value=[
+          Summary.Value(tag='examples/sec', simple_value=examples_per_sec)
+      ])
+      self._summary_writer.add_summary(global_step_summary, global_step)
+      self._summary_writer.add_summary(example_summary, global_step)
+    tf.compat.v1.logging.info('global_step/sec: %g', global_step_per_sec)
+    tf.compat.v1.logging.info('examples/sec: %g', examples_per_sec)
+
+
+class InstallSignalHandlerHook(tf.compat.v1.train.SessionRunHook):
+  """Change SIGINT (CTRL^C) handler to force quit the process.
+
+  The default behavior often results in hanging processes.
+  The original handler is restored after training/evaluation.
+  """
+
+  def __init__(self):
+    self._signal_fn = signal.getsignal(signal.SIGINT)
+
+  def before_run(self, run_context):
+    signal.signal(signal.SIGINT, signal.SIG_DFL)
+
+  def end(self, session):
+    signal.signal(signal.SIGINT, self._signal_fn)
+
+
+class ExportSavedModelApiVersion(enum.Enum):
+  V1 = 1
+  V2 = 2
+
+
+class BatchConfig(
+    collections.namedtuple('BatchConfig', [
+        'num_batch_threads', 'max_batch_size', 'batch_timeout_micros',
+        'allowed_batch_sizes', 'max_enqueued_batches'
+    ])):
+  """Class to handle config inputs into the batching function."""
+
+  def __new__(cls,
+              num_batch_threads,
+              max_batch_size,
+              batch_timeout_micros,
+              allowed_batch_sizes,
+              max_enqueued_batches=100):
+    """Creates an BatchConfig instance.
+
+    Args:
+      num_batch_threads: Number of scheduling threads for processing batches
+        of work. Determines the number of batches processed in parallel.
+      max_batch_size: Batch sizes will never be bigger than this.
+      batch_timeout_micros: Maximum number of microseconds to wait before
+        outputting an incomplete batch.
+      allowed_batch_sizes: Optional list of allowed batch sizes. If left empty,
+        does nothing. Otherwise, supplies a list of batch sizes, causing the op
+        to pad batches up to one of those sizes. The entries must increase
+        monotonically, and the final entry must equal max_batch_size.
+      max_enqueued_batches: The maximum depth of the batch queue. Defaults to
+        100.
+
+    Returns:
+      A BatchConfig instance.
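+
+    Example (a minimal sketch; the argument values are illustrative only):
+
+    ```
+    batch_config = BatchConfig(
+        num_batch_threads=2,
+        max_batch_size=8,
+        batch_timeout_micros=5000,
+        allowed_batch_sizes=[2, 4, 8])
+    ```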
+    """
+    return super(BatchConfig, cls).__new__(
+        cls,
+        num_batch_threads=num_batch_threads,
+        max_batch_size=max_batch_size,
+        batch_timeout_micros=batch_timeout_micros,
+        allowed_batch_sizes=allowed_batch_sizes,
+        max_enqueued_batches=max_enqueued_batches)
+
+
+@estimator_export(v1=['estimator.tpu.TPUEstimator'])
+class TPUEstimator(estimator_lib.Estimator):
+  """Estimator with TPU support.
+
+  TPUEstimator also supports training on CPU and GPU. You don't need to define
+  a separate `tf.estimator.Estimator`.
+
+  TPUEstimator handles many of the details of running on TPU devices, such as
+  replicating inputs and models for each core, and returning to host
+  periodically to run hooks.
+
+  TPUEstimator transforms a global batch size in params to a per-shard batch
+  size when calling the `input_fn` and `model_fn`. Users should specify the
+  global batch size in the constructor, and then get the batch size for each
+  shard in `input_fn` and `model_fn` via `params['batch_size']`.
+
+  - For training, `model_fn` gets per-core batch size; `input_fn` may get
+    per-core or per-host batch size depending on `per_host_input_for_training`
+    in `TPUConfig` (See docstring for TPUConfig for details).
+
+  - For evaluation and prediction, `model_fn` gets per-core batch size and
+    `input_fn` gets per-host batch size.
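+
+  For example (illustrative numbers): with `train_batch_size=1024` on a
+  TPU with 8 replicas and per-core input, `params['batch_size']` is
+  `1024 / 8 = 128` inside `model_fn`.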
+
+  Evaluation
+  ==========
+
+  `model_fn` should return `TPUEstimatorSpec`, which expects the `eval_metrics`
+  for TPU evaluation. If eval_on_tpu is False, the evaluation will execute on
+  CPU or GPU; in this case the following discussion on TPU evaluation does not
+  apply.
+
+  `TPUEstimatorSpec.eval_metrics` is a tuple of `metric_fn` and `tensors`, where
+  `tensors` could be a list of any nested structure of `Tensor`s (See
+  `TPUEstimatorSpec` for details).  `metric_fn` takes the `tensors` and returns
+  a dict from metric string name to the result of calling a metric function,
+  namely a `(metric_tensor, update_op)` tuple.
+
+  One can set `use_tpu` to `False` for testing. All training, evaluation, and
+  prediction will be executed on CPU. `input_fn` and `model_fn` will receive
+  `train_batch_size` or `eval_batch_size` unmodified as `params['batch_size']`.
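+
+  For example (a minimal sketch; the argument values are illustrative):
+
+  ```
+  est = TPUEstimator(
+      model_fn=model_fn,
+      config=tpu_config.RunConfig(),
+      use_tpu=False,
+      train_batch_size=64,
+      eval_batch_size=64)
+  ```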
+
+  Current limitations:
+  --------------------
+
+  1. TPU evaluation only works on a single host (one TPU worker), except in
+     BROADCAST mode.
+
+  2. `input_fn` for evaluation should **NOT** raise an end-of-input exception
+     (`OutOfRangeError` or `StopIteration`). All evaluation steps and all
+     batches should have the same size.
+
+  Example (MNIST):
+  ----------------
+
+  ```
+  # The metric Fn which runs on CPU.
+  def metric_fn(labels, logits):
+    predictions = tf.argmax(logits, 1)
+    return {
+      'accuracy': tf.compat.v1.metrics.accuracy(
+          labels=labels, predictions=predictions),
+    }
+
+  # Your model Fn which runs on TPU (eval_metrics is a list in this example).
+  def model_fn(features, labels, mode, config, params):
+    ...
+    logits = ...
+
+    if mode == tf.estimator.ModeKeys.EVAL:
+      return tpu_estimator.TPUEstimatorSpec(
+          mode=mode,
+          loss=loss,
+          eval_metrics=(metric_fn, [labels, logits]))
+
+  # Or specify the eval_metrics tensors as a dict.
+  def model_fn(features, labels, mode, config, params):
+    ...
+    final_layer_output = ...
+
+    if mode == tf.estimator.ModeKeys.EVAL:
+      return tpu_estimator.TPUEstimatorSpec(
+          mode=mode,
+          loss=loss,
+          eval_metrics=(metric_fn, {
+              'labels': labels,
+              'logits': final_layer_output,
+          }))
+  ```
+
+  Prediction
+  ==========
+
+  Prediction on TPU is an experimental feature to support large batch inference.
+  It is not designed for latency-critical systems. In addition, due to some
+  usability issues, for prediction with a small dataset, CPU `.predict`, i.e.,
+  creating a new `TPUEstimator` instance with `use_tpu=False`, might be more
+  convenient.
+
+  Note: In contrast to TPU training/evaluation, the `input_fn` for prediction
+  *should* raise an end-of-input exception (`OutOfRangeError` or
+  `StopIteration`), which serves as the stopping signal to `TPUEstimator`. To be
+  precise, the ops created by `input_fn` produce one batch of the data.
+  The `predict()` API processes one batch at a time. When reaching the end of
+  the data source, an end-of-input exception should be raised by one of these
+  operations. The user usually does not need to do this manually. As long as the
+  dataset is not repeated forever, the `tf.data` API will raise an end-of-input
+  exception automatically after the last batch has been produced.
+
+  Note: Estimator.predict returns a Python generator. Please consume all the
+  data from the generator so that TPUEstimator can shut down the TPU system
+  properly for the user.
+
+  Current limitations:
+  --------------------
+  1. TPU prediction only works on a single host (one TPU worker).
+
+  2. `input_fn` must return a `Dataset` instance rather than `features`. In
+     fact, .train() and .evaluate() also support Dataset as a return value.
+
+  Example (MNIST):
+  ----------------
+  ```
+  height = 32
+  width = 32
+  total_examples = 100
+
+  def predict_input_fn(params):
+    batch_size = params['batch_size']
+
+    images = tf.random.uniform(
+        [total_examples, height, width, 3], minval=-1, maxval=1)
+
+    dataset = tf.data.Dataset.from_tensor_slices(images)
+    dataset = dataset.map(lambda images: {'image': images})
+
+    dataset = dataset.batch(batch_size)
+    return dataset
+
+  def model_fn(features, labels, params, mode):
+    # Generate predictions, called 'output', from features['image'].
+
+    if mode == tf.estimator.ModeKeys.PREDICT:
+      return tf.contrib.tpu.TPUEstimatorSpec(
+          mode=mode,
+          predictions={
+              'predictions': output,
+              'is_padding': features['is_padding']
+          })
+
+  tpu_est = TPUEstimator(
+      model_fn=model_fn,
+      ...,
+      predict_batch_size=16)
+
+  # Fully consume the generator so that TPUEstimator can shutdown the TPU
+  # system.
+  for item in tpu_est.predict(input_fn=predict_input_fn):
+    # Filter out item if the `is_padding` is 1.
+    # Process the 'predictions'
+  ```
+
+  Exporting
+  =========
+
+  `export_saved_model` exports two metagraphs, one with `saved_model.SERVING`,
+  and another with both `saved_model.SERVING` and `saved_model.TPU` tags. At
+  serving time, these tags are used to select the appropriate metagraph to
+  load.
+
+  Before running the graph on TPU, the TPU system needs to be initialized. If
+  the TensorFlow Serving model server is used, this is done automatically. If
+  not, please use `session.run(tpu.initialize_system())`.
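+
+  For example, outside the TensorFlow Serving model server, one might do the
+  following (a minimal sketch; `export_dir` is an illustrative path):
+
+  ```
+  with tf.compat.v1.Session() as sess:
+    sess.run(tf.compat.v1.tpu.initialize_system())
+    tf.compat.v1.saved_model.loader.load(
+        sess, [tf.saved_model.SERVING, tf.saved_model.TPU], export_dir)
+  ```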
+
+  There are two versions of the API: V1 and V2.
+
+  In V1, the exported CPU graph is `model_fn` as it is. The exported TPU graph
+  wraps `tpu.rewrite()` and `TPUPartitionedCallOp` around `model_fn` so
+  `model_fn` is on TPU by default. To place ops on CPU,
+  `tpu.outside_compilation(host_call, logits)` can be used.
+
+  Example:
+  ----------------
+
+  ```
+  def model_fn(features, labels, mode, config, params):
+    ...
+    logits = ...
+    export_outputs = {
+      'logits': export_output_lib.PredictOutput(
+        {'logits': logits})
+    }
+
+    def host_call(logits):
+      class_ids = math_ops.argmax(logits)
+      classes = string_ops.as_string(class_ids)
+      export_outputs['classes'] = (
+          export_output_lib.ClassificationOutput(classes=classes))
+
+    tpu.outside_compilation(host_call, logits)
+
+    ...
+  ```
+
+  In V2, `export_saved_model()` sets up the `params['use_tpu']` flag to let the
+  user know whether the code is exporting to TPU (or not). When
+  `params['use_tpu']` is `True`, users need to call `tpu.rewrite()`,
+  `TPUPartitionedCallOp` and/or `batch_function()`.
+
+  TIP: V2 is recommended as it is more flexible (e.g., batching).
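+
+  Example for V2 (a minimal sketch; `my_network` is an illustrative helper,
+  not part of this API):
+
+  ```
+  def model_fn(features, labels, mode, config, params):
+    if params['use_tpu']:
+      # Compile the forward pass for TPU.
+      logits = tpu.rewrite(my_network, [features])
+    else:
+      logits = my_network(features)
+    ...
+  ```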
+  """
+
+  def __init__(self,
+               model_fn=None,
+               model_dir=None,
+               config=None,
+               params=None,
+               use_tpu=True,
+               train_batch_size=None,
+               eval_batch_size=None,
+               predict_batch_size=None,
+               batch_axis=None,
+               eval_on_tpu=True,
+               export_to_tpu=True,
+               export_to_cpu=True,
+               warm_start_from=None,
+               embedding_config_spec=None,
+               export_saved_model_api_version=ExportSavedModelApiVersion.V1):
+    """Constructs an `TPUEstimator` instance.
+
+    Args:
+      model_fn: Model function as required by `Estimator` which returns
+        EstimatorSpec or TPUEstimatorSpec. `training_hooks`,
+        `evaluation_hooks`, and `prediction_hooks` must not capture any TPU
+        Tensor inside the model_fn.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model. If `None`, the model_dir
+        in `config` will be used if set. If both are set, they must be the
+        same. If both are `None`, a temporary directory will be used.
+      config: A `tpu_config.RunConfig` configuration object. Cannot be `None`.
+      params: An optional `dict` of hyper parameters that will be passed into
+        `input_fn` and `model_fn`.  Keys are names of parameters, values are
+        basic python types. There are reserved keys for `TPUEstimator`,
+        including 'batch_size'.
+      use_tpu: A bool indicating whether TPU support is enabled. Currently,
+        TPU training and evaluation respect this bit, but eval_on_tpu can
+        override execution of eval. See below.
+      train_batch_size: An int representing the global training batch size.
+        TPUEstimator transforms this global batch size to a per-shard batch
+        size, as params['batch_size'], when calling `input_fn` and `model_fn`.
+        Cannot be `None` if `use_tpu` is `True`. Must be divisible by total
+        number of replicas.
+      eval_batch_size: An int representing evaluation batch size. Must be
+        divisible by total number of replicas.
+      predict_batch_size: An int representing the prediction batch size. Must be
+        divisible by total number of replicas.
+      batch_axis: A python tuple of int values describing how each tensor
+        produced by the Estimator `input_fn` should be split across the TPU
+        compute shards. For example, if your input_fn produced (images, labels)
+        where the images tensor is in `HWCN` format, your shard dimensions would
+        be [3, 0], where 3 corresponds to the `N` dimension of your images
+        Tensor, and 0 corresponds to the dimension along which to split the
+        labels to match up with the corresponding images. If None is supplied,
+        and per_host_input_for_training is True, batches will be sharded based
+        on the major dimension. If tpu_config.per_host_input_for_training is
+        False or `PER_HOST_V2`, batch_axis is ignored.
+      eval_on_tpu: If False, evaluation runs on CPU or GPU. In this case, the
+        model_fn must return `EstimatorSpec` when called with `mode` as `EVAL`.
+      export_to_tpu: If True, `export_saved_model()` exports a metagraph for
+        serving on TPU. Note that unsupported export modes such as EVAL will be
+        ignored. For those modes, only a CPU model will be exported. Currently,
+        export_to_tpu only supports PREDICT.
+      export_to_cpu: If True, `export_saved_model()` exports a metagraph for
+        serving on CPU.
+      warm_start_from: Optional string filepath to a checkpoint or SavedModel to
+        warm-start from, or a `tf.estimator.WarmStartSettings` object to fully
+        configure warm-starting.  If the string filepath is provided instead of
+        a `WarmStartSettings`, then all variables are warm-started, and it is
+        assumed that vocabularies and Tensor names are unchanged.
+      embedding_config_spec: Optional EmbeddingConfigSpec instance to support
+        using TPU embedding.
+      export_saved_model_api_version: An integer, 1 or 2, where 1 corresponds
+        to V1 and 2 corresponds to V2 (defaults to V1). With V1,
+        `export_saved_model()` adds rewrite() and TPUPartitionedCallOp() for
+        the user; with V2, the user is expected to add rewrite(),
+        TPUPartitionedCallOp(), etc. in their model_fn.
+
+    Raises:
+      ValueError: `params` has reserved keys already.
+    """
+    if config is None or not isinstance(config, tpu_config.RunConfig):
+      raise ValueError(
+          '`config` must be provided with type `tpu_config.RunConfig`')
+
+    if params is not None and any(k in params for k in _RESERVED_PARAMS_KEYS):
+      raise ValueError('{} are reserved keys but existed in params {}.'.format(
+          _RESERVED_PARAMS_KEYS, params))
+
+    if use_tpu:
+      # Perform some very basic validations. More validations will be found in
+      # _InternalTPUContext.
+      if train_batch_size is None:
+        raise ValueError('`train_batch_size` cannot be `None`')
+      util_lib.check_positive_integer(train_batch_size, 'train_batch_size')
+
+      if (config.tpu_config.per_host_input_for_training is
+          tpu_config.InputPipelineConfig.PER_SHARD_V1 and
+          config.tpu_config.num_cores_per_replica):
+        raise ValueError(
+            'Model parallelism only supports per host input for training. '
+            'Please adjust TPURunconfig.per_host_input_for_training.')
+
+      if eval_batch_size is not None:
+        util_lib.check_positive_integer(eval_batch_size, 'eval_batch_size')
+
+      if predict_batch_size is not None:
+        util_lib.check_positive_integer(predict_batch_size,
+                                        'predict_batch_size')
+
+      if embedding_config_spec:
+        if (config.tpu_config.per_host_input_for_training not in (
+            tpu_config.InputPipelineConfig.PER_HOST_V1,
+            tpu_config.InputPipelineConfig.PER_HOST_V2)):
+          raise ValueError('Only PER_HOST_V1 and PER_HOST_V2 are supported '
+                           'when using TPU Embedding; got {}.'.format(
+                               config.tpu_config.per_host_input_for_training))
+        self._embedding_from_feature_columns = (
+            embedding_config_spec.feature_columns is not None)
+
+    if (not (use_tpu and eval_on_tpu) and embedding_config_spec and
+        embedding_config_spec.partition_strategy == 'mod'):
+      raise ValueError('Mod sharding of embedding tables not supported on '
+                       'CPU.')
+    _tpu_estimator_gauge.get_cell().set(True)
+    # Verifies the model_fn signature according to Estimator framework.
+    estimator_lib._verify_model_fn_args(model_fn, params)  # pylint: disable=protected-access
+    # We cannot store config and params in this constructor as the parent
+    # constructor might change them, such as assigning a temp dir for
+    # config.model_dir.
+    model_function = self._augment_model_fn(model_fn, batch_axis)
+
+    # Overwrite log_step_count_steps to disable TensorLoggingHook and
+    # StepCounterHook from being created in Estimator. TPUEstimator already
+    # added equivalent hooks in _augment_model_fn above.
+    self._log_every_n_steps = config.log_step_count_steps
+    config = config.replace(log_step_count_steps=None)
+
+    # Pass non-None params, as the wrapped model_fn expects them.
+    params = params or {}
+    super(TPUEstimator, self).__init__(
+        model_fn=model_function,
+        model_dir=model_dir,
+        config=config,
+        params=params,
+        warm_start_from=warm_start_from)
+    self._iterations_per_training_loop = util_lib.parse_iterations_per_loop(
+        self._config.tpu_config.iterations_per_loop)
+    # In the absence of an explicit `log_every_n_secs` config, if the
+    # `iterations_per_loop` value is specified as time in seconds, enable
+    # logging every n secs based on the `iterations_per_loop` value. This is a
+    # trade-off that avoids an API change in the current release.
+    # TODO(henrytan): add `log_every_n_secs` to RunConfig.
+    if self._iterations_per_training_loop.unit == 'seconds':
+      self._log_every_n_secs = self._iterations_per_training_loop.value
+      self._log_every_n_steps = None
+    elif self._iterations_per_training_loop.unit == 'count':
+      if self._log_every_n_steps is not None:
+        # Each session.run() lasts for iterations_per_loop. We can't log
+        # in-between a session.run(), and we can only log after the
+        # `iterations_per_loop` steps, so we can only approximate. If a user
+        # requests to log every N steps, we actually want to roughly log every
+        # N / `iterations_per_loop` steps to match the original intention.
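+        # For example (illustrative numbers): with log_every_n_steps=100 and
+        # iterations_per_loop=25, we log every ceil(100 / 25) = 4 session
+        # runs, i.e. roughly every 100 global steps.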
+        self._log_every_n_steps = (
+            int(
+                math.ceil(
+                    float(self._log_every_n_steps) /
+                    self._iterations_per_training_loop.value)))
+      self._log_every_n_secs = None
+    else:
+      assert False, ('Invalid TPUConfig `iterations_per_loop` value. '
+                     'Indicates a bug in `iterations_per_loop` '
+                     'parsing.')
+
+    # All properties passed to _InternalTPUContext are immutable.
+    # pylint: disable=protected-access
+    self._ctx = tpu_context._get_tpu_context(self._config, train_batch_size,
+                                             eval_batch_size,
+                                             predict_batch_size, use_tpu,
+                                             eval_on_tpu, embedding_config_spec)
+
+    self._export_to_cpu = export_to_cpu
+    self._export_to_tpu = export_to_tpu
+
+    if not (isinstance(export_saved_model_api_version,
+                       ExportSavedModelApiVersion)
+            or export_saved_model_api_version == 1
+            or export_saved_model_api_version == 2):
+      raise ValueError('export_saved_model_api_version should be 1 or 2; '
+                       'got {}.'.format(
+                           export_saved_model_api_version))
+    self._export_saved_model_api_version = export_saved_model_api_version
+    self._is_input_fn_invoked = None
+
+    self._rendezvous = {}
+
+  def _add_meta_graph_for_mode(self,
+                               builder,
+                               input_receiver_fn_map,
+                               checkpoint_path,
+                               save_variables=True,
+                               mode=model_fn_lib.ModeKeys.PREDICT,
+                               export_tags=None,
+                               check_variables=True,
+                               strip_default_attrs=True):
+    if self._export_to_tpu and mode != model_fn_lib.ModeKeys.PREDICT:
+      tf.compat.v1.logging.warn(
+          'TPUEstimator only handles mode PREDICT for exporting '
+          'when `export_to_tpu` is `True`; Mode {} will be ignored '
+          'for TPU.'.format(mode))
+
+    if not self._export_to_cpu and not self._export_to_tpu:
+      raise ValueError('One of export_to_cpu and export_to_tpu must be true.')
+
+    if self._export_to_cpu:
+      (super(TPUEstimator, self)._add_meta_graph_for_mode(
+          builder,
+          input_receiver_fn_map,
+          checkpoint_path,
+          save_variables,
+          mode=mode,
+          export_tags=export_tags,
+          check_variables=check_variables,
+          strip_default_attrs=strip_default_attrs))
+
+    if self._export_to_tpu and mode == model_fn_lib.ModeKeys.PREDICT:
+      input_receiver_fn_map = {
+          _INFERENCE_ON_TPU_MODE: input_receiver_fn_map[mode]
+      }
+      export_tags = [tf.saved_model.SERVING, tf.saved_model.TPU]
+      mode = _INFERENCE_ON_TPU_MODE
+
+      # See b/110052256 for why `check_variables` is `False`.
+      if not self._export_to_cpu:
+        check_variables = save_variables = True
+      else:
+        check_variables = save_variables = False
+      (super(TPUEstimator, self)._add_meta_graph_for_mode(
+          builder,
+          input_receiver_fn_map,
+          checkpoint_path,
+          save_variables=save_variables,
+          mode=mode,
+          export_tags=export_tags,
+          check_variables=check_variables,
+          strip_default_attrs=strip_default_attrs))
+
+  def _call_model_fn(self, features, labels, mode, config):
+    if mode == _INFERENCE_ON_TPU_MODE:
+      context = tpu._TPUInferenceContext('tpu_inference', check_ops=False)
+      try:
+        context.Enter()
+        if (
+            (self._export_saved_model_api_version ==
+             ExportSavedModelApiVersion.V1)
+            or self._export_saved_model_api_version == 1):
+          result = self._call_model_fn_for_inference(features, labels, mode,
+                                                     config)
+        else:
+          result = super(TPUEstimator,
+                         self)._call_model_fn(features, labels, mode, config)
+      finally:
+        context.Exit()
+      return result
+    else:
+      return super(TPUEstimator, self)._call_model_fn(features, labels, mode,
+                                                      config)
+
+  def _call_model_fn_for_inference(self, features, labels, mode, config):
+    """Wraps `_call_model_fn` for `export_saved_model`."""
+    if mode != _INFERENCE_ON_TPU_MODE:
+      raise ValueError('mode must be {}; '
+                       'got {}.'.format(_INFERENCE_ON_TPU_MODE, mode))
+    return model_fn_inference_on_tpu(
+        self._model_fn,
+        features,
+        labels,
+        config,
+        self._params,
+        batch_config=None)
+
+  def _create_global_step(self, graph):
+    """Creates a global step suitable for TPUs.
+
+    Args:
+      graph: The graph in which to create the global step.
+
+    Returns:
+      A global step `Tensor`.
+
+    Raises:
+      ValueError: if the global step tensor is already defined.
+    """
+    return _create_global_step(graph)
+
+  def _convert_train_steps_to_hooks(self, steps, max_steps):
+    with self._ctx.with_mode(model_fn_lib.ModeKeys.TRAIN) as ctx:
+      if ctx.is_running_on_cpu():
+        return super(TPUEstimator,
+                     self)._convert_train_steps_to_hooks(steps, max_steps)
+
+    # On TPU.
+    if steps is None and max_steps is None:
+      raise ValueError(
+          'For TPU training, one of `steps` or `max_steps` must be set. '
+          'They cannot both be `None`.')
+
+    # Estimator.train has explicit positiveness check.
+    if steps is not None:
+      util_lib.check_positive_integer(steps, 'Train steps')
+    if max_steps is not None:
+      util_lib.check_positive_integer(max_steps, 'Train max_steps')
+
+    return [
+        _TPUStopAtStepHook(self._iterations_per_training_loop, steps, max_steps)
+    ]
+
+  def _convert_eval_steps_to_hooks(self, steps):
+    with self._ctx.with_mode(model_fn_lib.ModeKeys.EVAL) as ctx:
+      if ctx.is_running_on_cpu():
+        return super(TPUEstimator, self)._convert_eval_steps_to_hooks(steps)
+
+    if steps is None:
+      raise ValueError('Evaluate `steps` must be set on TPU. Cannot be `None`.')
+
+    util_lib.check_positive_integer(steps, 'Eval steps')
+
+    return [
+        evaluation._StopAfterNEvalsHook(  # pylint: disable=protected-access
+            num_evals=steps),
+        _SetEvalIterationsHook(steps)
+    ]
+
+  def _call_input_fn(self, input_fn, mode, input_context=None):
+    """Calls the input function.
+
+    Args:
+      input_fn: The input function.
+      mode: ModeKeys
+      input_context: Optional instance of `tf.distribute.InputContext`.
+
+    Returns:
+      In TPU mode, returns an input_fn to be called later in model_fn.
+      Otherwise, calls the input_fn and returns either features or
+        (features, labels).
+
+    Raises:
+      ValueError: if input_fn takes invalid arguments or does not have `params`.
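+
+    Example of a compatible `input_fn` (a minimal sketch; `make_dataset` is an
+    illustrative helper, not part of this module):
+
+    ```
+    def input_fn(params):
+      batch_size = params['batch_size']
+      return make_dataset().batch(batch_size, drop_remainder=True)
+    ```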
+    """
+    input_fn_args = function_utils.fn_args(input_fn)
+    config = self.config  # a deep copy.
+    kwargs = {}
+    if 'params' in input_fn_args:
+      kwargs['params'] = self.params  # a deep copy.
+    else:
+      raise ValueError('input_fn ({}) does not include params argument, '
+                       'required by TPUEstimator to pass batch size as '
+                       'params["batch_size"]'.format(input_fn))
+    if 'config' in input_fn_args:
+      kwargs['config'] = config
+
+    if 'mode' in input_fn_args:
+      kwargs['mode'] = mode
+
+    if 'input_context' in input_fn_args:
+      kwargs['input_context'] = input_context
+
+    # Record the fact that input_fn has been invoked.
+    self._is_input_fn_invoked = True
+
+    with self._ctx.with_mode(mode) as ctx:
+      if (ctx.is_running_on_cpu() and
+          ctx.is_input_slice_broadcast_to_all_cores()):
+        raise ValueError('Invalid TPUConfig `eval_training_input_configuration`'
+                         ' value. SLICED mode only works on use_tpu = True.')
+      # Set the batch size in params first. This helps the user keep the same
+      # input_fn for use_tpu=True/False.
+      batch_size_for_input_fn = ctx.batch_size_for_input_fn
+      if batch_size_for_input_fn is not None:
+        _add_item_to_params(kwargs['params'], _BATCH_SIZE_KEY,
+                            batch_size_for_input_fn)
+
+      # For export_saved_model, input_fn is never passed to Estimator. So,
+      # `is_export_mode` must be False.
+      if ctx.is_running_on_cpu(is_export_mode=False):
+        with tf.compat.v1.device('/device:CPU:0'):
+          return input_fn(**kwargs)
+
+      # For TPU computation, input_fn should be invoked in a tf.while_loop for
+      # performance. While constructing the tf.while_loop, the structure of
+      # inputs returned by the `input_fn` needs to be recorded. The structure
+      # includes whether features or labels is dict or single Tensor, dict keys,
+      # tensor shapes, and dtypes. The recorded structure is used to create the
+      # infeed dequeue ops, which must be wrapped and passed as a Fn, called
+      # inside the TPU computation, as the TPU computation is wrapped inside a
+      # tf.while_loop also. So, we either pass input_fn to model_fn or pass
+      # dequeue_fn to model_fn. Here, `input_fn` is passed directly as
+      # `features` in `model_fn` signature.
+      def _input_fn(ctx):
+        _add_item_to_params(kwargs['params'], _CTX_KEY, ctx)
+        return input_fn(**kwargs)
+
+      return _input_fn
+
+  def _validate_features_in_predict_input(self, result):
+    """Skip the validation.
+
+    For TPUEstimator, we do not need to check the result type. `_InputPipeline`
+    has a stronger check. The parent class's check generates a confusing
+    warning message.
+
+    Args:
+      result: `features` returned by input_fn.
+    """
+    pass
+
+  def train(self,
+            input_fn,
+            hooks=None,
+            steps=None,
+            max_steps=None,
+            saving_listeners=None):
+    rendezvous = error_handling.ErrorRendezvous(num_sources=3)
+    self._rendezvous[model_fn_lib.ModeKeys.TRAIN] = rendezvous
+    try:
+      return super(TPUEstimator, self).train(
+          input_fn=input_fn,
+          hooks=hooks,
+          steps=steps,
+          max_steps=max_steps,
+          saving_listeners=saving_listeners)
+    except Exception:  # pylint: disable=broad-except
+      rendezvous.record_error('training_loop', sys.exc_info())
+    finally:
+      rendezvous.record_done('training_loop')
+      rendezvous.raise_errors()
+
+  def evaluate(self,
+               input_fn,
+               steps=None,
+               hooks=None,
+               checkpoint_path=None,
+               name=None):
+    rendezvous = error_handling.ErrorRendezvous(num_sources=3)
+    self._rendezvous[model_fn_lib.ModeKeys.EVAL] = rendezvous
+    try:
+      return super(TPUEstimator, self).evaluate(
+          input_fn,
+          steps=steps,
+          hooks=hooks,
+          checkpoint_path=checkpoint_path,
+          name=name)
+    except Exception:  # pylint: disable=broad-except
+      rendezvous.record_error('evaluation_loop', sys.exc_info())
+    finally:
+      rendezvous.record_done('evaluation_loop')
+      rendezvous.raise_errors()
+
+  def predict(self,
+              input_fn,
+              predict_keys=None,
+              hooks=None,
+              checkpoint_path=None,
+              yield_single_examples=True):
+    rendezvous = error_handling.ErrorRendezvous(num_sources=3)
+    self._rendezvous[model_fn_lib.ModeKeys.PREDICT] = rendezvous
+    try:
+      for result in super(TPUEstimator, self).predict(
+          input_fn=input_fn,
+          predict_keys=predict_keys,
+          hooks=hooks,
+          checkpoint_path=checkpoint_path,
+          yield_single_examples=yield_single_examples):
+        yield result
+    except Exception:  # pylint: disable=broad-except
+      rendezvous.record_error('prediction_loop', sys.exc_info())
+    finally:
+      rendezvous.record_done('prediction_loop')
+      rendezvous.raise_errors()
+
+  def _augment_model_fn(self, model_fn, batch_axis):
+    """Returns a new model_fn, which wraps the TPU support."""
+
+    def _model_fn(features, labels, mode, config, params):
+      """A Estimator `model_fn` for TPUEstimator."""
+
+      # `input_fn` is called in `train()`, `evaluate()`, and `predict()`,
+      # but not in `export_saved_model()`.
+      if self._is_input_fn_invoked:
+        is_export_mode = False
+      else:
+        is_export_mode = True
+
+      # Clear the bit.
+      self._is_input_fn_invoked = None
+
+      if is_export_mode:
+        if mode == _INFERENCE_ON_TPU_MODE:
+          _add_item_to_params(params, _USE_TPU_KEY, True)
+          mode = model_fn_lib.ModeKeys.PREDICT
+        else:
+          _add_item_to_params(params, _USE_TPU_KEY, False)
+
+      with self._ctx.with_mode(mode) as ctx:
+        model_fn_wrapper = _ModelFnWrapper(model_fn, config, params, ctx)
+
+        # examples_hook is added to training_hooks for both CPU and TPU
+        # execution.
+        if (self._log_every_n_steps is not None or
+            self._log_every_n_secs is not None):
+          examples_hook = ExamplesPerSecondHook(
+              ctx.global_batch_size,
+              # pylint:disable=g-long-ternary
+              output_dir=(self.model_dir
+                          if not config or config.save_summary_steps else None),
+              # pylint:enable=g-long-ternary
+              every_n_steps=self._log_every_n_steps,
+              every_n_secs=self._log_every_n_secs)
+
+        if ctx.is_running_on_cpu(is_export_mode=is_export_mode):
+          tf.compat.v1.logging.info('Running %s on CPU/GPU', mode)
+          estimator_spec = model_fn_wrapper.call_without_tpu(
+              features, labels, is_export_mode=is_export_mode)
+          if (self._log_every_n_steps is not None or
+              self._log_every_n_secs is not None):
+            estimator_spec = estimator_spec._replace(
+                training_hooks=estimator_spec.training_hooks + (examples_hook,))
+          return estimator_spec
+
+        assert labels is None, '`labels` passed to `model_fn` must be `None`.'
+        # TPUEstimator._call_input_fn passes `input_fn` as features to here.
+        assert callable(features), '`input_fn` is not callable.'
+        input_fn = features
+
+        tpu_init_ops = []
+        if ctx.embedding_config and mode == model_fn_lib.ModeKeys.TRAIN:
+          dummy_table_variables, dummy_table_variables_init = (
+              tpu_embedding_gradient.create_dummy_table_variables(
+                  ctx.embedding_config.tpu_embedding))
+          ctx.embedding_config.dummy_table_variables = dummy_table_variables
+          tpu_init_ops.append(dummy_table_variables_init)
+
+        input_holders = _InputPipeline(input_fn, batch_axis, ctx)
+        enqueue_ops, dequeue_fn, input_hooks, run_infeed_loop_on_coordinator = (
+            input_holders.generate_infeed_enqueue_ops_and_dequeue_fn())
+
+        graph = tf.compat.v1.get_default_graph()
+        for enqueue_op in enqueue_ops:
+          if isinstance(enqueue_op, list):
+            graph.get_collection_ref(_TPU_ENQUEUE_OPS).extend(enqueue_op)
+          else:
+            graph.add_to_collection(_TPU_ENQUEUE_OPS, enqueue_op)
+
+        if mode == model_fn_lib.ModeKeys.TRAIN:
+          compile_op, loss, host_call, scaffold_fn, training_hooks = (
+              _train_on_tpu_system(ctx, model_fn_wrapper, dequeue_fn))
+          has_saver_hook = training_hooks and any(
+              isinstance(hook, tf.compat.v1.train.CheckpointSaverHook)
+              for hook in training_hooks)
+          if ctx.embedding_config:
+            g = tf.compat.v1.get_default_graph()
+            table_to_config_dict = (
+                ctx.embedding_config.tpu_embedding.table_to_config_dict)
+            optimization_parameters = (
+                ctx.embedding_config.tpu_embedding.optimization_parameters)
+            if self._embedding_from_feature_columns:
+              embedding_variable_name_by_table, slot_variable_names_by_table = (
+                  _tpu_estimator_embedding.get_full_variable_names(
+                      g, table_to_config_dict, optimization_parameters))
+            else:
+              embedding_variable_name_by_table = None
+              slot_variable_names_by_table = None
+            embedding_variables_and_ops = (
+                ctx.embedding_config.tpu_embedding.create_variables_and_ops(
+                    embedding_variable_name_by_table,
+                    slot_variable_names_by_table))
+            tpu_init_ops.extend(embedding_variables_and_ops.load_ops())
+          # scaffold_fn must be called after the variables for TPU embedding
+          # have been created on CPU, as the user might reinitialize those
+          # from some checkpoint within scaffold_fn.
+          scaffold = _get_scaffold(scaffold_fn)
+
+          host_ops = host_call.create_tpu_hostcall()
+
+          shutdown_hooks = []
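+          # Recognized values, handled below: 'shutdown_worker',
+          # 'shutdown_all_workers', and 'reset_computation' (the default).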
+          shutdown_mode = os.environ.get('TF_TPU_GRACEFUL_SHUTDOWN_MODE',
+                                         'reset_computation')
+          if shutdown_mode:
+            if shutdown_mode == 'shutdown_worker':
+              finalizer_hooks = [
+                  session_support.ShutdownLameWorkers(),
+              ]
+            elif shutdown_mode == 'shutdown_all_workers':
+              finalizer_hooks = [
+                  session_support.ShutdownAllWorkers(),
+              ]
+            elif shutdown_mode == 'reset_computation':
+              finalizer_hooks = [
+                  session_support.ResetComputation(),
+              ]
+            else:
+              raise ValueError('Unknown TF_TPU_GRACEFUL_SHUTDOWN_MODE "%s"' %
+                               shutdown_mode)
+
+            if finalizer_hooks:
+              if has_saver_hook:
+                saver = _NotSaver(
+                    'No save on shutdown when there are user-defined '
+                    'CheckpointSaverHooks')
+              else:
+                saver = None  # Yes automatic save on shutdown.
+              shutdown_hooks.append(
+                  session_support.GracefulShutdownHook(
+                      checkpoint_prefix=self.model_dir + '/model.ckpt',
+                      on_shutdown_hooks=finalizer_hooks,
+                      saver=saver))
+
+          with tf.control_dependencies([loss]):
+            global_step = tf.identity(tf.compat.v1.train.get_global_step())
+          hooks = input_hooks + shutdown_hooks
+
+          if ctx.feed_hook is not None:
+            tf.compat.v1.logging.info(
+                'Using user-implemented TPU infeed/outfeed session hook '
+                'class.')
+            infeed_outfeed_session_hook_class = ctx.feed_hook
+          else:
+            infeed_outfeed_session_hook_class = TPUInfeedOutfeedSessionHook
+
+          hooks.extend([
+              infeed_outfeed_session_hook_class(
+                  ctx,
+                  enqueue_ops,
+                  host_ops,
+                  tpu_compile_op=compile_op,
+                  run_infeed_loop_on_coordinator=(
+                      run_infeed_loop_on_coordinator),
+                  rendezvous=self._rendezvous[mode],
+                  master=self._config.master,
+                  session_config=self._session_config,
+                  tpu_init_ops=tpu_init_ops,
+                  outfeed_every_n_steps=self._config.tpu_config
+                  .experimental_host_call_every_n_steps),
+              InstallSignalHandlerHook()
+          ])
+          if _check_add_preemption_hook(self._config.cluster):
+            hooks.extend(
+                [preempted_hook.CloudTPUPreemptedHook(self._config.cluster)])
+          if (self._log_every_n_steps is not None or
+              self._log_every_n_secs is not None):
+            if self._iterations_per_training_loop.unit == 'count':
+              examples_hook._set_steps_per_run(  # pylint: disable=protected-access
+                  self._iterations_per_training_loop.value)
+            hooks.append(
+                tf.compat.v1.train.LoggingTensorHook(
+                    {
+                        'loss': tf.identity(loss),
+                        'step': global_step,
+                    },
+                    every_n_iter=self._log_every_n_steps,
+                    every_n_secs=self._log_every_n_secs))
+            hooks.append(examples_hook)
+
+          if training_hooks:
+            hooks.extend(training_hooks)
+
+          chief_hooks = []
+          if (not has_saver_hook and
+              (self._config.save_checkpoints_secs or
+               self._config.save_checkpoints_steps)):
+            checkpoint_hook = tf.compat.v1.train.CheckpointSaverHook(
+                self.model_dir,
+                save_secs=self._config.save_checkpoints_secs,
+                save_steps=self._config.save_checkpoints_steps,
+                scaffold=scaffold,
+                save_graph_def=self._config.checkpoint_save_graph_def)
+            if self._iterations_per_training_loop.unit == 'count':
+              checkpoint_hook._set_steps_per_run(  # pylint: disable=protected-access
+                  self._iterations_per_training_loop.value)
+            else:
+              # When estimating iterations_per_loop, set steps_per_run to an
+              # arbitrarily high number to force checking the global step on
+              # every call.
+              # TODO(henrytan): refactor SecondOrStepTimer to do this more
+              # explicitly.
+              checkpoint_hook._set_steps_per_run(  # pylint: disable=protected-access
+                  100000)
+            chief_hooks.append(checkpoint_hook)
+          else:
+            tf.compat.v1.logging.info('Bypassing TPUEstimator hook')
+
+          tf.compat.v1.summary.scalar(model_fn_lib.LOSS_METRIC_KEY, loss)
+          with tf.control_dependencies([loss]):
+            update_ops = _sync_variables_ops(ctx)
+            if ctx.embedding_config:
+              update_ops.extend(embedding_variables_and_ops.retrieve_ops())
+
+          # Validate the TPU training graph to catch basic errors.
+          _validate_tpu_training_graph(ctx)
+
+          train_op = tf.group(*update_ops)
+          graph.add_to_collection(_TPU_TRAIN_OP, train_op)
+
+          return model_fn_lib.EstimatorSpec(
+              mode,
+              loss=loss,
+              training_chief_hooks=chief_hooks,
+              training_hooks=hooks,
+              train_op=train_op,
+              scaffold=scaffold)
+
+        if mode == model_fn_lib.ModeKeys.EVAL:
+          compile_op, total_loss, host_calls, scaffold_fn, eval_hooks = (
+              _eval_on_tpu_system(ctx, model_fn_wrapper, dequeue_fn))
+          if ctx.embedding_config:
+            g = tf.compat.v1.get_default_graph()
+            table_to_config_dict = (
+                ctx.embedding_config.tpu_embedding.table_to_config_dict)
+            if self._embedding_from_feature_columns:
+              embedding_variable_name_by_table, _ = (
+                  _tpu_estimator_embedding.get_full_variable_names(
+                      g, table_to_config_dict))
+            else:
+              embedding_variable_name_by_table = None
+            embedding_variables_and_ops = (
+                ctx.embedding_config.tpu_embedding.create_variables_and_ops(
+                    embedding_variable_name_by_table))
+            tpu_init_ops.extend(embedding_variables_and_ops.load_ops())
+          # scaffold_fn must be called after the variables for TPU embedding
+          # have been created on CPU, as the user might reinitialize those
+          # from some checkpoint within scaffold_fn.
+          scaffold = _get_scaffold(scaffold_fn)
+          iterations_per_loop_var = _create_or_get_iterations_per_loop()
+          mean_loss = tf.compat.v1.div(
+              total_loss,
+              tf.cast(iterations_per_loop_var, dtype=total_loss.dtype))
+
+          with tf.control_dependencies([mean_loss]):
+            # After TPU evaluation computation is done (the mean_loss tensor),
+            # read all variables back from TPU and update the eval step
+            # counter properly.
+            internal_ops_to_run = _sync_variables_ops(ctx)
+            internal_ops_to_run.append(
+                _increase_eval_step_op(iterations_per_loop_var))
+
+          host_call_ret = host_calls.create_tpu_hostcall()
+          eval_metric_ops = {}
+          eval_update_ops = []
+
+          eval_metrics = host_call_ret.get('eval_metrics', {})
+          if eval_metrics:
+            # Creates a dummy metric update_op for all metrics. Estimator
+            # expects all metrics in `eval_metric_ops` to have an update_op,
+            # and calls them one by one. The real metric update_ops are
+            # invoked in a separate thread. So, here we give Estimator the
+            # dummy op for all metrics.
+            with tf.control_dependencies(internal_ops_to_run):
+              dummy_update_op = tf.no_op()
+
+            for k, v in eval_metrics.items():
+              eval_metric_ops[k] = (v[0], dummy_update_op)
+              eval_update_ops.append(v[1])
+          else:
+            # If no eval metrics are passed, create an identity node for the
+            # loss and add `internal_ops_to_run` to its dependencies so that
+            # `internal_ops_to_run` can be executed.
+            with tf.control_dependencies(internal_ops_to_run):
+              mean_loss = tf.identity(mean_loss)
+
+          if 'host_call' not in host_call_ret:
+            host_ops = []
+          else:
+            host_ops = host_call_ret['host_call']
+          hooks = [
+              TPUInfeedOutfeedSessionHook(
+                  ctx,
+                  enqueue_ops,
+                  eval_update_ops + host_ops,
+                  tpu_compile_op=compile_op,
+                  run_infeed_loop_on_coordinator=(
+                      run_infeed_loop_on_coordinator),
+                  rendezvous=self._rendezvous[mode],
+                  master=self._config.evaluation_master,
+                  session_config=self._session_config,
+                  tpu_init_ops=tpu_init_ops)
+          ] + input_hooks
+
+          if _check_add_preemption_hook(self._config.cluster):
+            hooks.extend(
+                [preempted_hook.CloudTPUPreemptedHook(self._config.cluster)])
+
+          if eval_hooks:
+            hooks.extend(eval_hooks)
+
+          return model_fn_lib.EstimatorSpec(
+              mode,
+              loss=mean_loss,
+              evaluation_hooks=hooks,
+              eval_metric_ops=eval_metric_ops,
+              scaffold=scaffold)
+
+        # Predict
+        assert mode == model_fn_lib.ModeKeys.PREDICT
+
+        (compile_op, dummy_predict_op, host_calls, scaffold_fn,
+         prediction_hooks) = _predict_on_tpu_system(ctx, model_fn_wrapper,
+                                                    dequeue_fn)
+        scaffold = _get_scaffold(scaffold_fn)
+        with tf.control_dependencies([dummy_predict_op]):
+          internal_ops_to_run = _sync_variables_ops(ctx)
+          with tf.control_dependencies(internal_ops_to_run):
+            dummy_predict_op = tf.no_op()
+
+        # In train and evaluation, the main TPU program is passed to monitored
+        # training session to run. Infeed enqueue and outfeed dequeue are
+        # executed in side threads. This is not the configuration for
+        # prediction mode.
+        #
+        # For prediction, the Estimator executes the EstimatorSpec.predictions
+        # directly and yields the elements (via a generator) to the call site.
+        # So, the outfeed based prediction must be passed to MonitoredSession
+        # directly. Other parts of the TPU execution are organized as follows.
+        #
+        # 1. All outfeed based Tensors must be grouped with predictions Tensors
+        #    to form a single invocation. This avoids the issue that we might
+        #    trigger multiple outfeeds incorrectly. To achieve this,
+        #    `host_call` is placed in control_dependencies of
+        #    `stopping_signals`, and `stopping_signals` is passed into
+        #    _StoppingPredictHook, which sets
+        #    the `stopping_signals` as SessionRunArgs. MonitoredSession merges
+        #    all SessionRunArgs with the fetch in session.run together.
+        #
+        # 2. The TPU program (dummy_predict_op) and enqueue_ops (infeed Enqueue)
+        #    are grouped together. They will be launched once and only once in
+        #    side threads and they quit naturally according to the SAME stopping
+        #    condition.
+        enqueue_ops.append(dummy_predict_op)
+
+        host_call_ret = host_calls.create_tpu_hostcall()
+        if 'host_call' not in host_call_ret:
+          host_ops = []
+        else:
+          host_ops = host_call_ret['host_call']
+
+        predictions = host_call_ret['predictions']
+        _verify_cross_hosts_transfer_size(
+            predictions,
+            message=(
+                'The estimated size for TPUEstimatorSpec.predictions is too '
+                'large.'))
+        signals = host_call_ret['signals']
+
+        with tf.control_dependencies(host_ops):
+          host_ops = []  # Empty; we do not need it anymore.
+          scalar_stopping_signal = _StopSignals.as_scalar_stopping_signal(
+              signals)
+          predictions = _PaddingSignals.slice_tensor_or_dict(
+              predictions, signals)
+
+        hooks = [
+            _StoppingPredictHook(scalar_stopping_signal),
+            TPUInfeedOutfeedSessionHookForPrediction(
+                ctx,
+                enqueue_ops,
+                host_ops,
+                rendezvous=self._rendezvous[mode],
+                tpu_compile_op=compile_op,
+                master=self._config.master,
+                session_config=self._session_config),
+        ] + input_hooks
+
+        if prediction_hooks:
+          hooks.extend(prediction_hooks)
+
+        return model_fn_lib.EstimatorSpec(
+            mode,
+            prediction_hooks=hooks,
+            predictions=predictions,
+            scaffold=scaffold)
+
+    return _model_fn
+
+
+def _check_add_preemption_hook(cluster):
+  return (tpu_cluster_resolver.is_running_in_gce() and cluster and isinstance(
+      cluster, tf.distribute.cluster_resolver.TPUClusterResolver) and
+          cluster._cloud_tpu_client.api_available())
+
+
+def _export_output_to_tensors(export_output):
+  """Get a list of `Tensors` used in `export_output`.
+
+  Args:
+    export_output: an `ExportOutput` object such as `ClassificationOutput`,
+      `RegressionOutput`, or `PredictOutput`.
+
+  Returns:
+    a list of tensors used in export_output.
+
+  Raises:
+    ValueError: if `export_output` is not one of `ClassificationOutput`,
+        `RegressionOutput`, or `PredictOutput`.
+  """
+  if isinstance(export_output, export_output_lib.ClassificationOutput):
+    return [export_output.scores, export_output.classes]
+  elif isinstance(export_output, export_output_lib.RegressionOutput):
+    return [export_output.value]
+  elif isinstance(export_output, export_output_lib.PredictOutput):
+    return list(export_output.outputs.values())
+  else:
+    raise ValueError(
+        '`export_output` must have type `ClassificationOutput`, '
+        '`RegressionOutput`, or `PredictOutput`; got {}.'.format(export_output))
+
+
+def _clone_export_output_with_tensors(export_output, tensors):
+  """Clones `export_output` but with new `tensors`.
+
+  Args:
+    export_output: an `ExportOutput` object such as `ClassificationOutput`,
+      `RegressionOutput`, or `PredictOutput`.
+    tensors: a list of `Tensors` used to construct a new `export_output`.
+
+  Returns:
+    A dict similar to `export_output` but with `tensors`.
+
+  Raises:
+    ValueError: if `export_output` is not one of `ClassificationOutput`,
+        `RegressionOutput`, or `PredictOutput`.
+  """
+  if isinstance(export_output, export_output_lib.ClassificationOutput):
+    if len(tensors) != 2:
+      raise ValueError('tensors must be of length 2; '
+                       'got {}.'.format(len(tensors)))
+    return export_output_lib.ClassificationOutput(*tensors)
+  elif isinstance(export_output, export_output_lib.RegressionOutput):
+    if len(tensors) != 1:
+      raise ValueError('tensors must be of length 1; '
+                       'got {}'.format(len(tensors)))
+    return export_output_lib.RegressionOutput(*tensors)
+  elif isinstance(export_output, export_output_lib.PredictOutput):
+    return export_output_lib.PredictOutput(
+        dict(zip(export_output.outputs.keys(), tensors)))
+  else:
+    raise ValueError(
+        '`export_output` must have type `ClassificationOutput`, '
+        '`RegressionOutput`, or `PredictOutput`; got {}.'.format(export_output))
+
+
+def _eval_on_tpu_system(ctx, model_fn_wrapper, dequeue_fn):
+  """Executes `model_fn_wrapper` multiple times on all TPU shards."""
+  iterations_per_loop_var = _create_or_get_iterations_per_loop()
+
+  (single_tpu_eval_step, host_calls, captured_scaffold_fn, captured_eval_hooks
+  ) = model_fn_wrapper.convert_to_single_tpu_eval_step(dequeue_fn)
+
+  @tpu_function.on_device_training_loop
+  def multi_tpu_eval_steps_on_single_shard(replica_id):
+    # `tpu.split_compile_and_shard()` splits and passes input for each
+    # replica as an array. As such, correctly reshape the input to be a
+    # scalar.
+    replica_id = tf.reshape(replica_id, [])
+    with tpu_context._TPUEstimatorReplicaContext(replica_id):  # pylint: disable=protected-access
+      return training_loop.repeat(iterations_per_loop_var, single_tpu_eval_step,
+                                  [_ZERO_LOSS])
+
+  # Add input that represents id for each replica in sync so that
+  # _TPUEstimatorReplicaContext can be correctly entered during
+  # replicated computation.
+  replica_id_inputs = []
+  replica_id_inputs.append([tf.constant(i) for i in range(ctx.num_replicas)])
+
+  (
+      compile_op,
+      loss,
+  ) = tpu.split_compile_and_shard(
+      multi_tpu_eval_steps_on_single_shard,
+      inputs=replica_id_inputs,
+      num_shards=ctx.num_replicas,
+      outputs_from_all_shards=False,
+      device_assignment=ctx.device_assignment)
+
+  loss = loss[0]
+  return (compile_op, loss, host_calls, captured_scaffold_fn,
+          captured_eval_hooks.get())
+
+
+def _train_on_tpu_system(ctx, model_fn_wrapper, dequeue_fn):
+  """Executes `model_fn_wrapper` multiple times on all TPU shards."""
+  iterations_per_loop_var = _create_or_get_iterations_per_loop()
+
+  (single_tpu_train_step, host_call, captured_scaffold_fn,
+   captured_training_hooks) = (
+       model_fn_wrapper.convert_to_single_tpu_train_step(dequeue_fn))
+
+  @tpu_function.on_device_training_loop
+  def multi_tpu_train_steps_on_single_shard(replica_id):
+    # `tpu.split_compile_and_shard()` splits and passes the input for each
+    # replica as an array, so reshape it back to a scalar here.
+    replica_id = tf.reshape(replica_id, [])
+    with tpu_context._TPUEstimatorReplicaContext(replica_id):  # pylint: disable=protected-access
+      outputs = training_loop.while_loop(
+          lambda i, loss: i < iterations_per_loop_var,
+          lambda i, loss: [i + 1, single_tpu_train_step(i)],
+          inputs=[0, _INITIAL_LOSS])
+      return outputs[1:]
+
+  # Pass each replica's id as an input so that _TPUEstimatorReplicaContext
+  # can be entered correctly during the replicated computation.
+  replica_id_inputs = []
+  replica_id_inputs.append([tf.constant(i) for i in range(ctx.num_replicas)])
+
+  (compile_op, loss) = tpu.split_compile_and_shard(
+      multi_tpu_train_steps_on_single_shard,
+      inputs=replica_id_inputs,
+      num_shards=ctx.num_replicas,
+      outputs_from_all_shards=False,
+      device_assignment=ctx.device_assignment)
+
+  loss = loss[0]
+  return (compile_op, loss, host_call, captured_scaffold_fn,
+          captured_training_hooks.get())
+
+
+def _predict_on_tpu_system(ctx, model_fn_wrapper, dequeue_fn):
+  """Executes `model_fn_wrapper` multiple times on all TPU shards."""
+  (single_tpu_predict_step, host_calls, captured_scaffold_fn,
+   captured_predict_hooks
+  ) = model_fn_wrapper.convert_to_single_tpu_predict_step(dequeue_fn)
+
+  @tpu_function.on_device_training_loop
+  def multi_tpu_predict_steps_on_single_shard(replica_id):
+    # `tpu.split_compile_and_shard()` splits and passes the input for each
+    # replica as an array, so reshape it back to a scalar here.
+    replica_id = tf.reshape(replica_id, [])
+    with tpu_context._TPUEstimatorReplicaContext(replica_id):  # pylint: disable=protected-access
+
+      def cond(scalar_stopping_signal):
+        return tf.math.logical_not(
+            _StopSignals.should_stop(scalar_stopping_signal))
+
+      inputs = [_StopSignals.NON_STOPPING_SIGNAL]
+      outputs = training_loop.while_loop(
+          cond, single_tpu_predict_step, inputs=inputs, name=b'loop')
+      return outputs
+
+  # Pass each replica's id as an input so that _TPUEstimatorReplicaContext
+  # can be entered correctly during the replicated computation.
+  replica_id_inputs = []
+  replica_id_inputs.append([tf.constant(i) for i in range(ctx.num_replicas)])
+  (
+      compile_op,
+      dummy_predict_op,
+  ) = tpu.split_compile_and_shard(
+      multi_tpu_predict_steps_on_single_shard,
+      inputs=replica_id_inputs,
+      num_shards=ctx.num_replicas,
+      outputs_from_all_shards=False,
+      device_assignment=ctx.device_assignment)
+
+  dummy_predict_op = dummy_predict_op[0]
+  return (compile_op, dummy_predict_op, host_calls, captured_scaffold_fn,
+          captured_predict_hooks.get())
+
+
+def _wrap_computation_in_while_loop(device, op_fn):
+  """Wraps the ops generated by `op_fn` in tf.while_loop."""
+
+  def computation(i):
+    with tf.control_dependencies(op_fn()):
+      return i + 1
+
+  iterations_per_loop_var = _create_or_get_iterations_per_loop()
+  # By setting parallel_iterations=1, the parallel execution in while_loop is
+  # basically turned off.
+  with tf.compat.v1.device(device):
+    iterations = tf.identity(iterations_per_loop_var)
+    return tf.compat.v1.while_loop(
+        lambda i: i < iterations,
+        computation, [tf.constant(0)],
+        parallel_iterations=1)
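+
+
+# Illustrative sketch for `_wrap_computation_in_while_loop` above, assuming
+# `enqueue_ops` is a list of infeed ops built elsewhere:
+#
+#   loop_op = _wrap_computation_in_while_loop('/cpu:0', lambda: enqueue_ops)
+#   # Running `loop_op` executes `enqueue_ops` once per iteration, for
+#   # `iterations_per_loop_var` iterations; parallel_iterations=1 keeps the
+#   # iterations strictly serial.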
+
+
+def _wrap_computation_in_while_loop_with_stopping_signals(device, op_fn):
+  """Wraps the ops generated by `op_fn` in tf.while_loop."""
+
+  def cond(scalar_stopping_signal):
+    return tf.math.logical_not(_StopSignals.should_stop(scalar_stopping_signal))
+
+  def computation(unused_scalar_stopping_signal):
+    return_value = op_fn()
+    execute_ops = return_value['ops']
+    signals = return_value['signals']
+    with tf.control_dependencies(execute_ops):
+      return _StopSignals.as_scalar_stopping_signal(signals)
+
+  # By setting parallel_iterations=1, the parallel execution in while_loop is
+  # basically turned off.
+  with tf.compat.v1.device(device):
+    return tf.compat.v1.while_loop(
+        cond,
+        computation, [_StopSignals.NON_STOPPING_SIGNAL],
+        parallel_iterations=1)
+
+
+def _validate_tpu_training_graph(ctx):
+  """Validate graph before running distributed training.
+
+  Args:
+    ctx: A `_InternalTPUContext` instance with mode.
+
+  Raises:
+    ValueError: If the graph seems invalid for running on device
+  """
+  if control_flow_util.ENABLE_CONTROL_FLOW_V2:
+    return  # b/124241278
+
+  operations = tf.compat.v1.get_default_graph().get_operations()
+
+  # Check that there is at least one CrossReplicaSum operation in the graph.
+  # These ops are introduced by using the CrossShardOptimizer wrapper.
+  cross_replica_sum_ops = [
+      o for o in operations if o.type == _CROSS_REPLICA_SUM_OP
+  ]
+  if not cross_replica_sum_ops and ctx.num_replicas > 1:
+    raise ValueError(
+        'CrossShardOptimizer must be used for model training on TPUs.')
+
+
+class _CapturedObject(object):
+  """A placeholder to capture an object.
+
+  This is useful when we need to capture a Python object in the Tensorflow
+  control flow body function and use it outside the control flow.
+  """
+
+  def __init__(self):
+    self._object = None
+    self._captured = False
+
+  def capture(self, o):
+    if self._captured:
+      raise RuntimeError(
+          'InternalError: Object can be captured only once. Please file a bug.')
+
+    self._captured = True
+    self._object = o
+
+  def get(self):
+    if not self._captured:
+      raise RuntimeError(
+          'InternalError: Object is not captured properly before `get`. '
+          'Please file a bug.')
+    return self._object
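+
+
+# Illustrative sketch of the capture pattern used throughout this module
+# (hypothetical names; Python objects such as hooks cannot be returned
+# through TF control flow, so they are smuggled out via a _CapturedObject):
+#
+#   captured_hooks = _CapturedObject()
+#
+#   def loop_body():
+#     hooks = [...]                   # built inside the control-flow body
+#     captured_hooks.capture(hooks)   # record the Python object
+#     return some_tensor
+#
+#   # ... run `loop_body` under tpu.shard() / while_loop ...
+#   hooks = captured_hooks.get()      # retrieve it outside the control flow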
+
+
+def _get_scaffold(captured_scaffold_fn):
+  """Retrieves the Scaffold from `captured_scaffold_fn`."""
+  with _CapturingContext(message='Inside scaffold_fn'):
+    scaffold_fn = captured_scaffold_fn.get()
+    if scaffold_fn:
+      scaffold = scaffold_fn()
+      if scaffold is None:
+        raise ValueError(
+            'TPUEstimatorSpec.scaffold_fn returns None, which is not allowed')
+    else:
+      scaffold = None
+
+  if scaffold:
+    wrapped_finalize = scaffold.finalize
+
+    def _finalize():
+      with _CapturingContext('Inside Scaffold.finalize'):
+        wrapped_finalize()
+
+    scaffold.finalize = _finalize
+  return scaffold
+
+
+class _CapturingContext(control_flow_ops.ControlFlowContext):
+  """Tracks references to Tensors defined in TPU replication."""
+
+  def __init__(self, message):
+    control_flow_ops.ControlFlowContext.__init__(self)
+    self._message = message
+
+  def to_control_flow_context_def(self, context_def, export_scope=None):
+    # pylint: disable=useless-super-delegation
+    # NOTE(slebedev): the method is required by `ControlFlowContext`.
+    super(_CapturingContext,
+          self).to_control_flow_context_def(context_def, export_scope)
+
+  def AddOp(self, op):  # pylint: disable=invalid-name
+    for c in op.inputs:
+      if tpu._TPU_REPLICATE_ATTR in c.op.node_def.attr:  # pylint: disable=protected-access
+        raise ValueError('{}: Op {} depends on TPU computation {}, '
+                         'which is not allowed.'.format(self._message, op, c))
+
+  def AddValue(self, value):
+    self.AddOp(value.op)
+    return value
+
+  def __enter__(self):
+    # pylint: disable=protected-access
+    self._g = tf.compat.v1.get_default_graph()
+    self._old = self._g._get_control_flow_context()
+    self._g._set_control_flow_context(self)
+    # pylint: enable=protected-access
+
+  def __exit__(self, _, __, ___):  # pylint: disable=invalid-name
+    self._g._set_control_flow_context(self._old)  # pylint: disable=protected-access
+
+
+class _Inputs(object):
+  """A data structure representing the input_fn returned values.
+
+  This also supports the returned value from input_fn as `Dataset`.
+  """
+
+  def __init__(self, features=None, labels=None, dataset=None, signals=None):
+    if dataset is not None and (features is not None or labels is not None or
+                                signals is not None):
+      raise RuntimeError('Internal Error: Either (features and labels) or '
+                         'dataset should be provided, not both. Please file '
+                         'a bug.')
+
+    self._features = features
+    self._labels = labels
+    self._signals = signals
+
+    self._dataset = dataset
+    self._iterator = None
+
+  @staticmethod
+  def from_input_fn(return_values):
+    """Returns an `_Inputs` instance according to `input_fn` return value."""
+    if isinstance(return_values, dataset_ops.DatasetV2):
+      dataset = return_values
+      return _Inputs(dataset=dataset)
+
+    features, labels = _Inputs._parse_inputs(return_values)
+    return _Inputs(features, labels)
+
+  @staticmethod
+  def _parse_inputs(return_values):
+    if isinstance(return_values, tuple):
+      features, labels = return_values
+    else:
+      features, labels = return_values, None
+    return features, labels
+
+  @property
+  def is_dataset(self):
+    """Returns True if the return value from input_fn is Dataset."""
+    return self._dataset is not None
+
+  def dataset_initializer(self):
+    """Returns the dataset's initializer.
+
+    The initializer must be run before calling `features_and_labels`.
+    """
+    self._iterator = tf.compat.v1.data.make_initializable_iterator(
+        self._dataset)
+    return self._iterator.initializer
+
+  def features_and_labels(self):
+    """Gets `features` and `labels`."""
+    if self.is_dataset:
+      if self._iterator is None:
+        raise RuntimeError('Internal error: Must run dataset_initializer '
+                           'before calling features_and_labels(). Please file '
+                           'a bug!')
+      return _Inputs._parse_inputs(self._iterator.get_next())
+
+    return (self._features, self._labels)
+
+  def signals(self):
+    return self._signals
+
+  @property
+  def dataset(self):
+    return self._dataset
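+
+
+# Illustrative sketch for `_Inputs` above, assuming a toy `input_fn` that
+# returns a Dataset of (features, labels) tuples:
+#
+#   dataset = tf.data.Dataset.from_tensors(({'x': [1.0]}, [0.0]))
+#   inputs = _Inputs.from_input_fn(dataset)
+#   assert inputs.is_dataset
+#   init_op = inputs.dataset_initializer()   # must run before the line below
+#   features, labels = inputs.features_and_labels()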
+
+
+class _InputsWithStoppingSignals(_Inputs):
+  """Inputs with `_StopSignals` inserted into the dataset."""
+
+  def __init__(self,
+               dataset,
+               batch_size,
+               add_padding=False,
+               num_invocations_per_step=1):
+
+    assert dataset is not None
+    user_provided_dataset = dataset.map(
+        _InputsWithStoppingSignals.insert_stopping_signal(
+            stop=False, batch_size=batch_size, add_padding=add_padding))
+    if num_invocations_per_step == 1:
+      final_batch_dataset = dataset.take(1).map(
+          _InputsWithStoppingSignals.insert_stopping_signal(
+              stop=True, batch_size=batch_size, add_padding=add_padding))
+    else:
+      # We append (2 * num_invocations_per_step - 1) batches to exhaust the
+      # user_provided_dataset and stop properly.
+      # For example, if num_invocations_per_step is 2, we append 3 additional
+      # padding batches: b1, b2, b3.
+      # If user_provided_dataset contains two batches: a1, a2
+      # Step 1: [a1, a2]
+      # Step 2: [b1, b2] -> STOP
+      # If user_provided_dataset contains three batches: a1, a2, a3.
+      # The training loops:
+      # Step 1: [a1, a2]
+      # Step 2: [a3, b1]
+      # Step 3: [b2, b3] -> STOP.
+      final_batch_dataset = dataset.take(1).map(
+          _InputsWithStoppingSignals.insert_stopping_signal(
+              stop=True, batch_size=batch_size, add_padding=add_padding))
+      final_batch_dataset = final_batch_dataset.repeat(
+          2 * num_invocations_per_step - 1)
+
+      def _set_mask(data_dict):
+        signals = data_dict['signals']
+        signals['padding_mask'] = tf.compat.v1.ones_like(
+            signals['padding_mask'])
+        data_dict['signals'] = signals
+        return data_dict
+
+      # Mask out the extra batch.
+      final_batch_dataset = final_batch_dataset.map(_set_mask)
+
+    dataset = user_provided_dataset.concatenate(final_batch_dataset).prefetch(2)
+
+    super(_InputsWithStoppingSignals, self).__init__(dataset=dataset)
+    self._current_inputs = None
+
+  def features_and_labels(self):
+    if self._current_inputs is not None:
+      raise RuntimeError(
+          'Internal Error: The previous inputs have not been properly '
+          'consumed. First call features_and_labels, then call signals.')
+
+    inputs_with_signals = self._iterator.get_next()
+    features = inputs_with_signals['features']
+    labels = inputs_with_signals.get('labels')
+
+    self._current_inputs = inputs_with_signals
+    return features, labels
+
+  def signals(self):
+    """Returns the `Signals` from `_Inputs`."""
+    if self._current_inputs is None:
+      raise RuntimeError(
+          'Internal Error: The current inputs have not been properly '
+          'generated. First call features_and_labels, then call signals.')
+    signals = self._current_inputs['signals']
+    self._current_inputs = None
+    return signals
+
+  @staticmethod
+  def insert_stopping_signal(stop, batch_size, add_padding=False):
+    """Inserts stopping_signal into dataset via _map_fn.
+
+    Here we change the data structure in the dataset, such that the return
+    value is now a dictionary with `features`, `labels`, and `signals` as
+    three distinct keys. This structure makes it easier to decompose the
+    inputs later (see `features_and_labels`).
+
+    Args:
+      stop: bool, state of current stopping signals.
+      batch_size: int, batch size.
+      add_padding: bool, whether to pad the tensor to full batch size.
+
+    Returns:
+      A map_fn passed to dataset.map API.
+    """
+
+    def _map_fn(*args):
+      """The map fn to insert signals."""
+      if len(args) == 1:
+        # Unpack the single Tensor/dict argument as features. This is required
+        # when the input_fn returns no labels.
+        args = args[0]
+      features, labels = _Inputs._parse_inputs(args)
+      new_input_dict = {}
+
+      if add_padding:
+        padding_mask, features, labels = (
+            _PaddingSignals.pad_features_and_labels(features, labels,
+                                                    batch_size))
+
+        new_input_dict['features'] = features
+        if labels is not None:
+          new_input_dict['labels'] = labels
+
+      else:
+        new_input_dict['features'] = features
+        if labels is not None:
+          new_input_dict['labels'] = labels
+        padding_mask = None
+
+      new_input_dict['signals'] = _StopSignals(
+          stop=stop, batch_size=batch_size,
+          padding_mask=padding_mask).as_dict()
+
+      return new_input_dict
+
+    return _map_fn
+
+
+class _StopSignals(object):
+  """Signals class holding all logic to handle TPU stopping condition."""
+
+  NON_STOPPING_SIGNAL = False
+  STOPPING_SIGNAL = True
+
+  def __init__(self, stop, batch_size, padding_mask=None):
+    self._stop = stop
+    self._batch_size = batch_size
+    self._padding_mask = padding_mask
+
+  def as_dict(self):
+    """Returns the signals as Python dict."""
+    shape = [self._batch_size, 1]
+    dtype = tf.dtypes.bool
+
+    if self._stop:
+      stopping = tf.ones(shape=shape, dtype=dtype)
+    else:
+      stopping = tf.zeros(shape=shape, dtype=dtype)
+
+    signals = {'stopping': stopping}
+    if self._padding_mask is not None:
+      signals['padding_mask'] = self._padding_mask
+    return signals
+
+  @staticmethod
+  def as_scalar_stopping_signal(signals):
+    return tf.identity(signals['stopping'][0][0])
+
+  @staticmethod
+  def should_stop(scalar_stopping_signal):
+    """Detects whether scalar_stopping_signal indicates stopping."""
+    if isinstance(scalar_stopping_signal, tf.Tensor):
+      # STOPPING_SIGNAL is the constant True. The logical_and is just the TF
+      # way to check whether scalar_stopping_signal is True.
+      return tf.math.logical_and(scalar_stopping_signal,
+                                 _StopSignals.STOPPING_SIGNAL)
+    else:
+      # In the non-Tensor case, this is called from a SessionRunHook, so we
+      # cannot modify the graph anymore; use pure Python instead.
+      return bool(scalar_stopping_signal)
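+
+
+# Illustrative sketch for `_StopSignals` above: with batch_size=2,
+#
+#   _StopSignals(stop=False, batch_size=2).as_dict()
+#   # => {'stopping': <bool Tensor of shape [2, 1], all False>}
+#
+# and `as_scalar_stopping_signal` simply reads signals['stopping'][0][0].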
+
+
+class _PaddingSignals(object):
+  """Signals class holding all logic to handle padding."""
+
+  @staticmethod
+  def pad_features_and_labels(features, labels, batch_size):
+    """Pads out the batch dimension of features and labels."""
+    real_batch_size = tf.compat.v1.shape(
+        _PaddingSignals._find_any_tensor(features))[0]
+
+    batch_size_tensor = tf.constant(batch_size, tf.dtypes.int32)
+
+    check_greater = tf.compat.v1.debugging.assert_greater_equal(
+        batch_size_tensor,
+        real_batch_size,
+        data=(batch_size_tensor, real_batch_size),
+        message='The real batch size should not be greater than batch_size.')
+
+    with tf.control_dependencies([check_greater]):
+      missing_count = batch_size_tensor - real_batch_size
+
+    def pad_single_tensor(tensor):
+      """Pads out the batch dimension of a tensor to the complete batch_size."""
+      rank = len(tensor.shape)
+      assert rank > 0
+      padding = tf.stack([[0, missing_count]] + [[0, 0]] * (rank - 1))
+      padded_shape = (batch_size,) + tuple(tensor.shape[1:])
+      padded_tensor = tf.compat.v1.pad(tensor, padding)
+      padded_tensor.set_shape(padded_shape)
+      return padded_tensor
+
+    def nest_pad(tensor_or_dict):
+      return tf.nest.map_structure(pad_single_tensor, tensor_or_dict)
+
+    features = nest_pad(features)
+    if labels is not None:
+      labels = nest_pad(labels)
+
+    padding_mask = _PaddingSignals._padding_mask(real_batch_size, missing_count,
+                                                 batch_size)
+
+    return padding_mask, features, labels
+
+  @staticmethod
+  def slice_tensor_or_dict(tensor_or_dict, signals):
+    """Slice the real Tensors according to padding mask in signals."""
+
+    padding_mask = signals['padding_mask']
+    batch_size = tf.compat.v1.shape(padding_mask)[0]
+
+    def verify_batch_size(tensor):
+      check_batch_size = tf.math.equal(batch_size, tensor.shape[0])
+      with tf.control_dependencies([check_batch_size]):
+        return tf.identity(tensor)
+
+    def slice_single_tensor(tensor):
+      rank = len(tensor.shape)
+      assert rank > 0
+      real_batch_size = batch_size - tf.math.reduce_sum(padding_mask)
+      return verify_batch_size(tensor)[0:real_batch_size]
+
+    # As we split the Tensors across all TPU cores and concat them back, it is
+    # important to ensure that the real data is placed before the padded data,
+    # i.e., that order is preserved. Given that, the sliced padding mask should
+    # be all 0's. If this assertion fails, the slice logic here does not hold.
+    sliced_padding_mask = slice_single_tensor(padding_mask)
+    assert_padding_mask = tf.math.equal(
+        tf.math.reduce_sum(sliced_padding_mask), 0)
+
+    with tf.control_dependencies([assert_padding_mask]):
+      should_stop = _StopSignals.should_stop(
+          _StopSignals.as_scalar_stopping_signal(signals))
+
+    is_full_batch = tf.math.equal(tf.math.reduce_sum(padding_mask), 0)
+
+    def slice_fn(tensor):
+      # If the current batch is full batch or part of stopping signals, we do
+      # not need to slice to save performance.
+      return tf.compat.v1.cond(
+          tf.math.logical_or(should_stop, is_full_batch),
+          (lambda: verify_batch_size(tensor)),
+          (lambda: slice_single_tensor(tensor)))
+
+    return tf.nest.map_structure(slice_fn, tensor_or_dict)
+
+  @staticmethod
+  def _find_any_tensor(batch_features):
+    tensors = [
+        x for x in tf.nest.flatten(batch_features) if isinstance(x, tf.Tensor)
+    ]
+    if not tensors:
+      raise ValueError('Cannot find any Tensor in features dict.')
+    return tensors[0]
+
+  @staticmethod
+  def _padding_mask(real_batch_size, missing_count, batch_size):
+    padding_mask = tf.concat([
+        tf.zeros((real_batch_size,), dtype=tf.dtypes.int32),
+        tf.ones((missing_count,), dtype=tf.dtypes.int32)
+    ],
+                             axis=0)
+    padding_mask.set_shape((batch_size,))
+    return padding_mask
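+
+
+# Illustrative sketch of the padding arithmetic above: with batch_size=4 and a
+# final partial batch of 3 real examples, missing_count is 1 and
+#
+#   padding_mask == [0, 0, 0, 1]   # 0 marks real rows, 1 marks padded rows
+#
+# so `slice_tensor_or_dict` later drops the trailing row where the mask is 1.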
+
+
+def _verify_cross_hosts_transfer_size(tensor_dict, message):
+  total_size = 0
+  tensor_structure = {}
+  for key, tensor in tensor_dict.items():
+    shape = tensor.shape
+    size = np.product(shape) * tensor.dtype.size
+    tensor_structure[key] = shape
+    total_size += size
+  if total_size >= _ONE_GIGABYTE:
+    raise ValueError(
+        '{} The transfer size is larger than the protobuf limit. Please '
+        'consider to use Tensors with smaller shapes or reduce batch '
+        'size. Given:\n'
+        '{}'.format(
+            message, '\n'.join([
+                ' -- Key: {}, Shape: {}'.format(k, v)
+                for k, v in tensor_structure.items()
+            ])))
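+
+
+# Illustrative size arithmetic for the check above: a float32 Tensor of shape
+# [1024, 1024, 256] occupies 1024 * 1024 * 256 * 4 bytes = 1 GiB, so a
+# tensor_dict containing it would trip the _ONE_GIGABYTE limit.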
+
+
+def _add_item_to_params(params, key, value):
+  """Adds a new item into `params`."""
+  if hasattr(params, 'set_hparam'):
+    # For HParams, we need to use special API.
+    if key in params:
+      params.set_hparam(key, value)
+    else:
+      params.add_hparam(key, value)
+  else:
+    # Now params is Python dict.
+    params[key] = value
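+
+
+# Illustrative sketch for `_add_item_to_params` above, using a plain dict:
+#
+#   params = {'batch_size': 128}
+#   _add_item_to_params(params, 'use_tpu', True)
+#   # params == {'batch_size': 128, 'use_tpu': True}
+#
+# HParams objects take the set_hparam/add_hparam path instead.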
+
+
+def export_estimator_savedmodel(estimator,
+                                export_dir_base,
+                                serving_input_receiver_fn,
+                                assets_extra=None,
+                                as_text=False,
+                                checkpoint_path=None):
+  """Export `Estimator` trained model for TPU inference.
+
+  Args:
+    estimator: `Estimator` with which model has been trained.
+    export_dir_base: A string containing a directory in which to create
+      timestamped subdirectories containing exported SavedModels.
+    serving_input_receiver_fn: A function that takes no argument and returns a
+      `ServingInputReceiver` or `TensorServingInputReceiver`.
+    assets_extra: A dict specifying how to populate the assets.extra directory
+      within the exported SavedModel, or `None` if no extra assets are needed.
+    as_text: whether to write the SavedModel proto in text format.
+    checkpoint_path: The checkpoint path to export.  If `None` (the default),
+      the most recent checkpoint found within the model directory is chosen.
+
+  Returns:
+    The string path to the exported directory.
+  """
+  # `TPUEstimator` requires `tpu_config.RunConfig`, so we cannot use
+  # `estimator.config`.
+  config = tpu_config.RunConfig(model_dir=estimator.model_dir)
+  est = TPUEstimator(
+      estimator._model_fn,  # pylint: disable=protected-access
+      config=config,
+      params=estimator.params,
+      use_tpu=True,
+      train_batch_size=2048,  # Does not matter.
+      eval_batch_size=2048,  # Does not matter.
+  )
+  return est.export_saved_model(export_dir_base, serving_input_receiver_fn,
+                                assets_extra, as_text, checkpoint_path)
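+
+
+# Illustrative usage sketch for `export_estimator_savedmodel` above, assuming
+# `estimator` and `serving_input_receiver_fn` were built elsewhere:
+#
+#   export_dir = export_estimator_savedmodel(
+#       estimator, '/tmp/export', serving_input_receiver_fn)
+#   # Creates a timestamped SavedModel subdirectory under /tmp/export.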
+
+
+def model_fn_inference_on_tpu(model_fn,
+                              features,
+                              labels=None,
+                              config=None,
+                              params=None,
+                              batch_config=None):
+  """Convenience wrapper for export_saved_model API v2 for a model_fn.
+
+  WARNING: THIS METHOD IS DEPRECATED AND NOT PART OF THE PUBLIC API.
+
+  Make sure to set
+  `export_saved_model_api_version=tpu_estimator.ExportSavedModelApiVersion.V2`
+  when initializing TPUEstimator (the default API version is V1). This is
+  because:
+  1) `tpu.rewrite` (or `tpu.compile`) must not be called in a nested way
+     (otherwise validation throws an error like
+     "NotImplementedError: tpu_shard_context cannot be nested.").
+  2) With the V1 API, Estimator itself calls `tpu.rewrite`, so using
+     `model_fn_inference_on_tpu` would trigger a nested call. With the V2
+     API, users of Estimator need to call `tpu.rewrite` themselves (which
+     this wrapper does).
+
+  It attempts to execute the entire model function on the TPU for prediction.
+  Note that this does not support features which are SparseTensors. If you
+  have SparseTensor features, consider partitioning your model function
+  further and using inference_on_tpu.
+
+  Args:
+    model_fn: the model_fn for which we want to run inference on TPU.
+    features: a tensor or dict of tensors, serves as the feature inputs to the
+      model.
+    labels: a tensor or dict of tensors, serves as the labels inputs to the
+      model.
+    config: auxiliary config to the Estimator.
+    params: hparams that we want to pass to the model_fn.
+    batch_config: a named tuple to wrap the inference batching configuration
+      inputs.
+
+  Returns:
+    An EstimatorSpec containing the outputs in export_outputs and predictions.
+  """
+  computation, capture = _build_computation_for_inference(
+      model_fn, labels, config, params)
+  tensors = call_computation(features, computation, batch_config=batch_config)
+  estimator_spec, export_outputs_dict, predictions_dict, none_indices = (
+      capture.get())
+  predictions_list = tensors[:len(predictions_dict)]
+  export_outputs_list_without_none = tensors[len(predictions_dict):]
+
+  # Reinsert `None`s which we've taken out in
+  # `_build_computation_for_inference()`.
+  export_outputs_list = []
+  while none_indices or export_outputs_list_without_none:
+    if none_indices and none_indices[0] == len(export_outputs_list):
+      export_outputs_list.append(None)
+      none_indices.pop(0)
+    else:
+      export_outputs_list.append(export_outputs_list_without_none.pop(0))
+
+  # Reconstruct `export_outputs` with updated tensors.
+  new_export_outputs_dict = tf.nest.pack_sequence_as(export_outputs_dict,
+                                                     export_outputs_list)
+  export_outputs = estimator_spec.export_outputs
+  new_export_outputs = collections.OrderedDict(
+      (k, _clone_export_output_with_tensors(export_outputs[k], v))
+      for k, v in six.iteritems(new_export_outputs_dict))
+  # Reconstruct `predictions` with updated tensors.
+  new_predictions = tf.nest.pack_sequence_as(predictions_dict, predictions_list)
+  if (len(new_predictions) == 1 and
+      _KEY_WHEN_PREDICTIONS_IS_A_TENSOR in new_predictions):
+    new_predictions = new_predictions[_KEY_WHEN_PREDICTIONS_IS_A_TENSOR]
+
+  return estimator_spec._replace(
+      export_outputs=new_export_outputs, predictions=new_predictions)
+
+
+def _build_computation_for_inference(model_fn, labels, config, params):
+  """Builds the computation with calls the model_fn for inference."""
+  capture = _CapturedObject()
+
+  def computation(computation_input):
+    """Computation to be passed to `TPUPartitionedCall()`."""
+    tpu_computation, tpu_capture = _build_tpu_computation_for_inference(
+        model_fn, computation_input, labels, config, params)
+
+    tensors_on_cpu = tf.compat.v1.tpu.rewrite(tpu_computation)
+    tpu.prune_unconnected_ops_from_xla(tf.compat.v1.get_default_graph())
+
+    (estimator_spec, export_outputs_dict, export_outputs_list,
+     predictions_dict) = (
+         tpu_capture.get())
+    predictions_list = tensors_on_cpu[:len(predictions_dict)]
+    export_outputs_tpu_on_cpu_list = tensors_on_cpu[len(predictions_dict):]
+
+    # Reconstruct tensors used in export_outputs, with TPU tensors replaced
+    # with their CPU counterpart returned from `rewrite_for_inference()`.
+    # `function.Defun()` does not like `None`s in return values, so we leave
+    # `None`s out but record their positions for later reconstruction.
+    export_outputs_list_without_none = []
+    none_indices = []
+    for i, t in enumerate(export_outputs_list):
+      if t is None:
+        none_indices.append(i)
+      else:
+        export_outputs_list_without_none.append(
+            export_outputs_tpu_on_cpu_list.pop(0))
+
+    capture.capture(
+        (estimator_spec, export_outputs_dict, predictions_dict, none_indices))
+    return predictions_list + export_outputs_list_without_none
+
+  return computation, capture
+
+
+def _build_tpu_computation_for_inference(model_fn, features, labels, config,
+                                         params):
+  """Builds the TPU computation for inference on TPU."""
+  capture = _CapturedObject()
+
+  def computation():
+    """Compute tpu tensors used in export_outputs.
+
+    Passed to rewrite_for_inference so that model_fn will be called under
+    the rewriting contexts. Only tpu tensors are returned, but export_outputs
+    and scaffold are captured.
+
+    Returns:
+       A list of Tensors used in export_outputs and not marked for
+       outside_compilation.
+    """
+    # We should only call model fn once and it should be inside `computation`
+    # so that building the graph will happen under `rewrite_for_inference`.
+
+    model_fn_args = function_utils.fn_args(model_fn)
+    kwargs = {}
+    # Make deep copies of `config` and `params` in case the user mutates them.
+    if 'labels' in model_fn_args:
+      kwargs['labels'] = labels
+    if 'mode' in model_fn_args:
+      kwargs['mode'] = model_fn_lib.ModeKeys.PREDICT
+    if 'config' in model_fn_args:
+      kwargs['config'] = config
+    if 'params' in model_fn_args:
+      kwargs['params'] = params
+    estimator_spec = model_fn(features, **kwargs)
+
+    # We pick the TPU tensors out from `export_output` and later return them
+    # from `computation` for rewriting.
+    export_outputs_dict = collections.OrderedDict(
+        (k, _export_output_to_tensors(v))
+        for k, v in six.iteritems(estimator_spec.export_outputs))
+    export_outputs_list = tf.nest.flatten(export_outputs_dict)
+    export_outputs_tpu_list = [t for t in export_outputs_list if t is not None]
+
+    if isinstance(estimator_spec.predictions, dict):
+      predictions_dict = collections.OrderedDict(
+          (k, v) for k, v in six.iteritems(estimator_spec.predictions))
+    else:
+      predictions_dict = {
+          _KEY_WHEN_PREDICTIONS_IS_A_TENSOR: estimator_spec.predictions
+      }
+    predictions_list = tf.nest.flatten(predictions_dict)
+
+    # We cannot return everything we want through the return values, so
+    # capture the rest here for later use.
+    capture.capture((estimator_spec, export_outputs_dict, export_outputs_list,
+                     predictions_dict))
+    return predictions_list + export_outputs_tpu_list
+
+  return computation, capture
+
+
+def inference_on_tpu(computation,
+                     inputs_to_tpu,
+                     num_batch_threads,
+                     max_batch_size,
+                     batch_timeout_micros,
+                     allowed_batch_sizes=None,
+                     max_enqueued_batches=100):
+  """Convenient wrapper for export_saved_model API v2 to wrap TPU computation.
+
+  WARNING: THIS METHOD IS DEPRECATED AND NOT PART OF THE APIS.
+
+  Make sure to set
+  `export_saved_model_api_version=tpu_estimator.ExportSavedModelApiVersion.V2`
+  when initializing TPUEstimator (the default API version is V1). This is
+  because:
+  1) `tpu.rewrite` (or `tpu.compile`) must not be called in a nested way
+     (otherwise validation throws an error like
+     "NotImplementedError: tpu_shard_context cannot be nested.").
+  2) With the V1 API, Estimator itself calls `tpu.rewrite`, so using
+     `model_fn_inference_on_tpu` would trigger a nested call. With the V2
+     API, users of Estimator need to call `tpu.rewrite` themselves (which
+     this wrapper does).
+
+  It puts the computation on the TPU, adds batching around it, and round-robins
+  the computation between TPU cores.
+
+  See tpu_estimator_test.py for an example.
+
+  Args:
+    computation: computation to be put on TPU, which takes inputs_to_tpu as
+      arguments.
+    inputs_to_tpu: a list of tensors as input to computation.
+    num_batch_threads: Number of scheduling threads for processing batches of
+      work. Determines the number of batches processed in parallel.
+    max_batch_size: Batch sizes will never be bigger than this. If None or 0,
+      no batching will be done.
+    batch_timeout_micros: Maximum number of microseconds to wait before
+      outputting an incomplete batch.
+    allowed_batch_sizes: Optional list of allowed batch sizes. If left empty,
+      does nothing. Otherwise, supplies a list of batch sizes, causing the op to
+      pad batches up to one of those sizes. The entries must increase
+      monotonically, and the final entry must equal max_batch_size.
+    max_enqueued_batches: The maximum depth of the batch queue. Defaults to 100.
+
+  Returns:
+    The unbatched computation output Tensors.
+  """
+
+  def _tpu_call(args):
+    """Function to either call or feed into BatchFunction."""
+
+    @function.Defun(capture_resource_var_by_value=False)
+    def tpu_computation():
+      """Function to feed into the TPUPartitionedCallOp."""
+      tensors_on_cpu = tf.compat.v1.tpu.rewrite(computation, args)
+      tpu.prune_unconnected_ops_from_xla(ops.get_default_graph())
+      return tensors_on_cpu
+
+    return tpu_functional.TPUPartitionedCall(
+        args=tpu_computation.captured_inputs,
+        device_ordinal=tpu_ops.tpu_ordinal_selector(),
+        Tout=[o.type for o in tpu_computation.definition.signature.output_arg],
+        f=tpu_computation)
+
+  if not max_batch_size:
+    return _tpu_call(inputs_to_tpu)
+
+  @tf.nondifferentiable_batch_function(num_batch_threads, max_batch_size,
+                                       batch_timeout_micros,
+                                       allowed_batch_sizes,
+                                       max_enqueued_batches)
+  def batched_tpu_computation(*args):
+    """Function to feed into the BatchOp."""
+    return _tpu_call(args)
+
+  return batched_tpu_computation(*inputs_to_tpu)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/util.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/util.py
new file mode 100644
index 00000000..9ca6feef
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/util.py
@@ -0,0 +1,96 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ===================================================================
+"""Utilities for the functionalities."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import re
+import time
+import numpy as np
+import six
+import tensorflow as tf
+
+_ITERATIONS_PER_LOOP_VALUE_REGEX = re.compile(
+    r'^(?P<value>[1-9]\d*)((?P<suffix>[s|m|h])$|$)')
+
+IterationsPerLoopCounter = collections.namedtuple('IterationsPerLoopCounter',
+                                                  ['value', 'unit'])
+
+
+def check_positive_integer(value, name):
+  """Checks whether `value` is a positive integer."""
+  if not isinstance(value, (six.integer_types, np.integer)):
+    raise TypeError('{} must be int, got {}'.format(name, type(value)))
+
+  if value <= 0:
+    raise ValueError('{} must be positive, got {}'.format(name, value))
+
+
+def parse_iterations_per_loop(iterations_per_loop):
+  """Parses the `iterations_per_loop` value.
+
+  The parser expects `iterations_per_loop` to be either a positive integer
+  (a raw `count`) or a time-based value of the form `<N><s|m|h>`, where <N> is
+  any positive integer and `s`, `m`, `h` are units of time in seconds, minutes,
+  and hours respectively. Examples of valid values: `3600s`, `60m`, `1h`.
+
+  Args:
+    iterations_per_loop: Number of iterations, or the time allotted, to spend
+      on each per-device loop.
+
+  Returns:
+    A dictionary of `value` and `unit`. The `unit` value can be either a raw
+    `count`, or time in `seconds`.
+    {
+      "value": <positive-integer>,
+      "unit": <unit: `count` | `seconds`>
+    }
+  """
+  m = _ITERATIONS_PER_LOOP_VALUE_REGEX.match(str(iterations_per_loop))
+  if m is None:
+    raise ValueError(
+        'Invalid TPUConfig `iterations_per_loop` value. Value must be a '
+        'positive integer or a time-based value `<N><s|m|h>` where <N> is '
+        'any positive integer and `s`, `m`, `h` are units of time in '
+        'seconds, minutes, hours respectively. Examples of valid values: '
+        '`3600s`, `60m`, `1h`.')
+  unit_value = 'seconds' if m.group('suffix') in ['h', 'm', 's'] else 'count'
+  value = int(m.group('value'))
+  if m.group('suffix') == 'm':
+    value *= 60
+  elif m.group('suffix') == 'h':
+    value *= 3600
+  return IterationsPerLoopCounter(value, unit_value)
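+
+
+# Illustrative sketch of the accepted formats:
+#
+#   parse_iterations_per_loop('100')  # -> IterationsPerLoopCounter(100, 'count')
+#   parse_iterations_per_loop('60m')  # -> IterationsPerLoopCounter(3600, 'seconds')
+#   parse_iterations_per_loop('1h')   # -> IterationsPerLoopCounter(3600, 'seconds')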
+
+
+# TODO(b/118302029) Remove this copy of MultiHostDatasetInitializerHook after we
+# release a tensorflow_estimator with MultiHostDatasetInitializerHook in
+# python/estimator/util.py.
+class MultiHostDatasetInitializerHook(tf.compat.v1.train.SessionRunHook):
+  """Creates a SessionRunHook that initializes all passed iterators."""
+
+  def __init__(self, dataset_initializers):
+    self._initializers = dataset_initializers
+
+  def after_create_session(self, session, coord):
+    del coord
+    start = time.time()
+    session.run(self._initializers)
+    tf.compat.v1.logging.info('Initialized dataset iterators in %d seconds',
+                              time.time() - start)
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/training.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/training.py
new file mode 100644
index 00000000..8f6e780d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/training.py
@@ -0,0 +1,1118 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Classes and functions related to train_and_evaluate."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import json
+import os
+import time
+
+import six
+import tensorflow as tf
+from tensorflow.core.protobuf import config_pb2
+from tensorflow.python.distribute import estimator_training as distribute_coordinator_training
+from tensorflow.python.platform import tf_logging as logging
+from tensorflow.python.training import basic_session_run_hooks
+from tensorflow.python.training import server_lib
+from tensorflow.python.util.tf_export import estimator_export
+from tensorflow_estimator.python.estimator import estimator as estimator_lib
+from tensorflow_estimator.python.estimator import exporter as exporter_lib
+from tensorflow_estimator.python.estimator import run_config as run_config_lib
+
+_MAX_DELAY_SECS = 60
+_DELAY_SECS_PER_WORKER = 5
+_TF_CONFIG_ENV = 'TF_CONFIG'
+_ENVIRONMENT_KEY = 'environment'
+_ENVIRONMENT_GOOGLE_VALUE = 'google'
+_TRAINER_JOBS = (run_config_lib.TaskType.CHIEF, run_config_lib.TaskType.MASTER,
+                 run_config_lib.TaskType.WORKER)
+
+
+def _validate_input_fn(input_fn):
+  """Validates the `input_fn`."""
+  if not callable(input_fn):
+    raise TypeError('`input_fn` must be callable, given: {}'.format(input_fn))
+
+
+def _validate_hooks(hooks):
+  """Validates the `hooks`."""
+  hooks = tuple(hooks or [])
+  for hook in hooks:
+    if not isinstance(hook, tf.compat.v1.train.SessionRunHook):
+      raise TypeError(
+          'All hooks must be `SessionRunHook` instances, given: {}'.format(
+              hook))
+  return hooks
+
+
+def _validate_saving_listeners(saving_listeners):
+  """Validates the `saving_listeners`."""
+  saving_listeners = tuple(saving_listeners or [])
+  for saving_listener in saving_listeners:
+    if not isinstance(saving_listener,
+                      tf.compat.v1.train.CheckpointSaverListener):
+      raise TypeError(
+          'All saving_listeners must be `CheckpointSaverListener` instances, '
+          'given: {}'.format(saving_listener))
+  return saving_listeners
+
+
+def _validate_exporters(exporters):
+  """Validates `exporters` and returns them as a tuple."""
+  if not exporters:
+    return ()
+
+  if isinstance(exporters, exporter_lib.Exporter):
+    exporters = [exporters]
+
+  unique_names = []  # `Exporter`s should have unique names.
+  try:
+    for exporter in exporters:
+      if not isinstance(exporter, exporter_lib.Exporter):
+        # Error message will be printed out by the outer try/except.
+        raise TypeError
+
+      if not exporter.name:
+        full_list_of_names = [e.name for e in exporters]
+        raise ValueError('An Exporter cannot have a name that is `None` or'
+                         ' empty. All exporter names:'
+                         ' {}'.format(full_list_of_names))
+
+      if not isinstance(exporter.name, six.string_types):
+        raise ValueError('An Exporter must have a string name. Given: '
+                         '{}'.format(type(exporter.name)))
+
+      if exporter.name in unique_names:
+        full_list_of_names = [e.name for e in exporters]
+        raise ValueError(
+            '`exporters` must have unique names. Such a name cannot be `None`.'
+            ' All exporter names: {}'.format(full_list_of_names))
+      unique_names.append(exporter.name)
+  except TypeError:
+    # Two possibilities:
+    # - `exporters` is neither `Exporter` nor iterable.  Python has
+    #   raised a `TypeError` when iterating over `exporters`.
+    # - an `exporter` was None or not of type `Exporter`, so we raised a
+    #   `TypeError`.
+    raise TypeError('`exporters` must be an Exporter,'
+                    ' an iterable of Exporter, or `None`,'
+                    ' found %s.' % exporters)
+
+  return tuple(exporters)
+
+
+def _is_google_env():
+  """Detects whether current environment is google."""
+  tf_config = json.loads(os.environ.get(_TF_CONFIG_ENV) or '{}')
+  if not tf_config:
+    tf.compat.v1.logging.warn(
+        'TF_CONFIG should not be empty in a distributed environment.')
+  return tf_config.get(_ENVIRONMENT_KEY) == _ENVIRONMENT_GOOGLE_VALUE
+
+
+@estimator_export('estimator.TrainSpec')
+class TrainSpec(
+    collections.namedtuple(
+        'TrainSpec', ['input_fn', 'max_steps', 'hooks', 'saving_listeners'])):
+  """Configuration for the "train" part for the `train_and_evaluate` call.
+
+  `TrainSpec` determines the input data for the training, as well as the
+  duration. Optional hooks run at various stages of training.
+
+  Usage:
+
+  >>> train_spec = tf.estimator.TrainSpec(
+  ...    input_fn=lambda: 1,
+  ...    max_steps=100,
+  ...    hooks=[_StopAtSecsHook(stop_after_secs=10)],
+  ...    saving_listeners=[_NewCheckpointListenerForEvaluate(None, 20, None)])
+  >>> train_spec.saving_listeners[0]._eval_throttle_secs
+  20
+  >>> train_spec.hooks[0]._stop_after_secs
+  10
+  >>> train_spec.max_steps
+  100
+  """
+
+  def __new__(cls, input_fn, max_steps=None, hooks=None, saving_listeners=None):
+    """Creates a validated `TrainSpec` instance.
+
+    Args:
+      input_fn: A function that provides input data for training as minibatches.
+        See [Premade Estimators](
+        https://tensorflow.org/guide/premade_estimators#create_input_functions)
+        for more information. The function should construct and return one of
+        the following:
+          * A 'tf.data.Dataset' object: Outputs of `Dataset` object must be a
+            tuple (features, labels) with same constraints as below.
+          * A tuple (features, labels): Where features is a `Tensor` or a
+            dictionary of string feature name to `Tensor` and labels is a
+            `Tensor` or a dictionary of string label name to `Tensor`.
+      max_steps: Int. Positive number of total steps for which to train model.
+        If `None`, train forever. The training `input_fn` is not expected to
+        generate `OutOfRangeError` or `StopIteration` exceptions. See the
+        `train_and_evaluate` stop condition section for details.
+      hooks: Iterable of `tf.train.SessionRunHook` objects to run on all workers
+        (including chief) during training.
+      saving_listeners: Iterable of `tf.estimator.CheckpointSaverListener`
+        objects to run on chief during training.
+
+    Returns:
+      A validated `TrainSpec` object.
+
+    Raises:
+      ValueError: If any of the input arguments is invalid.
+      TypeError: If any of the arguments is not of the expected type.
+    """
+    # Validate input_fn.
+    _validate_input_fn(input_fn)
+
+    # Validate max_steps.
+    if max_steps is not None and max_steps <= 0:
+      raise ValueError(
+          'Must specify max_steps > 0, given: {}'.format(max_steps))
+
+    # Validate hooks.
+    hooks = _validate_hooks(hooks)
+
+    # Validate saving_listeners.
+    saving_listeners = _validate_saving_listeners(saving_listeners)
+
+    return super(TrainSpec, cls).__new__(
+        cls, input_fn=input_fn, max_steps=max_steps, hooks=hooks,
+        saving_listeners=saving_listeners)
+
+
+@estimator_export('estimator.EvalSpec')
+class EvalSpec(
+    collections.namedtuple('EvalSpec', [
+        'input_fn', 'steps', 'name', 'hooks', 'exporters', 'start_delay_secs',
+        'throttle_secs'
+    ])):
+  """Configuration for the "eval" part for the `train_and_evaluate` call.
+
+  `EvalSpec` combines details of evaluation of the trained model as well as its
+  export. Evaluation consists of computing metrics to judge the performance of
+  the trained model.  Export writes out the trained model on to external
+  storage.
+  """
+
+  def __new__(cls,
+              input_fn,
+              steps=100,
+              name=None,
+              hooks=None,
+              exporters=None,
+              start_delay_secs=120,
+              throttle_secs=600):
+    """Creates a validated `EvalSpec` instance.
+
+    Args:
+      input_fn: A function that constructs the input data for evaluation. See
+        [Premade Estimators](
+        https://tensorflow.org/guide/premade_estimators#create_input_functions)
+        for more information. The function should construct and return one of
+        the following:
+          * A 'tf.data.Dataset' object: Outputs of `Dataset` object must be a
+            tuple (features, labels) with same constraints as below.
+          * A tuple (features, labels): Where features is a `Tensor` or a
+            dictionary of string feature name to `Tensor` and labels is a
+            `Tensor` or a dictionary of string label name to `Tensor`.
+      steps: Int. Positive number of steps for which to evaluate model. If
+        `None`, evaluates until `input_fn` raises an end-of-input exception. See
+        `Estimator.evaluate` for details.
+      name: String. Name of the evaluation if user needs to run multiple
+        evaluations on different data sets. Metrics for different evaluations
+        are saved in separate folders, and appear separately in tensorboard.
+      hooks: Iterable of `tf.train.SessionRunHook` objects to run during
+        evaluation.
+      exporters: Iterable of `Exporter`s, or a single one, or `None`.
+        `exporters` will be invoked after each evaluation.
+      start_delay_secs: Int. Start evaluating after waiting for this many
+        seconds.
+      throttle_secs: Int. Do not re-evaluate unless the last evaluation was
+        started at least this many seconds ago. Of course, evaluation does not
+        occur if no new checkpoints are available, hence, this is the minimum.
+
+    Returns:
+      A validated `EvalSpec` object.
+
+    Raises:
+      ValueError: If any of the input arguments is invalid.
+      TypeError: If any of the arguments is not of the expected type.
+    """
+    # Validate input_fn.
+    _validate_input_fn(input_fn)
+
+    # Validate steps.
+    if steps is not None and steps <= 0:
+      raise ValueError('Must specify steps > 0, given: {}'.format(steps))
+
+    # Validate name.
+    if name is not None and not isinstance(name, six.string_types):
+      raise TypeError('`name` must be string, given: {}'.format(name))
+
+    # Validate hooks.
+    hooks = _validate_hooks(hooks)
+
+    # Validate exporters.
+    exporters = _validate_exporters(exporters)
+
+    # Validate start_delay_secs.
+    if start_delay_secs < 0:
+      raise ValueError('Must specify start_delay_secs >= 0, given: {}'.format(
+          start_delay_secs))
+
+    # Validate throttle_secs.
+    if throttle_secs < 0:
+      raise ValueError(
+          'Must specify throttle_secs >= 0, given: {}'.format(throttle_secs))
+
+    return super(EvalSpec, cls).__new__(
+        cls,
+        input_fn=input_fn,
+        steps=steps,
+        name=name,
+        hooks=hooks,
+        exporters=exporters,
+        start_delay_secs=start_delay_secs,
+        throttle_secs=throttle_secs)
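+
+
+# Illustrative usage sketch for `EvalSpec` above (hypothetical `eval_input_fn`
+# and `serving_input_receiver_fn` built elsewhere):
+#
+#   eval_spec = tf.estimator.EvalSpec(
+#       input_fn=eval_input_fn,
+#       steps=None,  # evaluate until eval_input_fn raises end-of-input
+#       exporters=tf.estimator.LatestExporter(
+#           'latest', serving_input_receiver_fn),
+#       throttle_secs=600)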
+
+
+@estimator_export('estimator.train_and_evaluate')
+def train_and_evaluate(estimator, train_spec, eval_spec):
+  """Train and evaluate the `estimator`.
+
+  This utility function trains, evaluates, and (optionally) exports the model by
+  using the given `estimator`. All training related specification is held in
+  `train_spec`, including training `input_fn` and training max steps, etc. All
+  evaluation and export related specification is held in `eval_spec`, including
+  evaluation `input_fn`, steps, etc.
+
+  This utility function provides consistent behavior for both local
+  (non-distributed) and distributed configurations. The default distribution
+  configuration is parameter server-based between-graph replication. For other
+  types of distribution configurations such as all-reduce training, please use
+  [DistributionStrategies](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/contrib/distribute).
+
+  Overfitting: In order to avoid overfitting, it is recommended to set up the
+  training `input_fn` to shuffle the training data properly.
+
+  Stop condition: In order to support both distributed and non-distributed
+  configuration reliably, the only supported stop condition for model
+  training is `train_spec.max_steps`. If `train_spec.max_steps` is `None`, the
+  model is trained forever. *Use with care* if model stop condition is
+  different. For example, assume that the model is expected to be trained with
+  one epoch of training data, and the training `input_fn` is configured to throw
+  `OutOfRangeError` after going through one epoch, which stops the
+  `Estimator.train`. For a three-training-worker distributed configuration, each
+  training worker is likely to go through the whole epoch independently. So, the
+  model will be trained with three epochs of training data instead of one epoch.
+
+  Example of local (non-distributed) training:
+
+  ```python
+  # Set up feature columns.
+  categorical_feature_a = categorical_column_with_hash_bucket(...)
+  categorical_feature_a_emb = embedding_column(
+      categorical_column=categorical_feature_a, ...)
+  ...  # other feature columns
+
+  estimator = DNNClassifier(
+      feature_columns=[categorical_feature_a_emb, ...],
+      hidden_units=[1024, 512, 256])
+
+  # Or set up the model directory
+  #   estimator = DNNClassifier(
+  #       config=tf.estimator.RunConfig(
+  #           model_dir='/my_model', save_summary_steps=100),
+  #       feature_columns=[categorical_feature_a_emb, ...],
+  #       hidden_units=[1024, 512, 256])
+
+  # Input pipeline for train and evaluate.
+  def train_input_fn(): # returns x, y
+    # please shuffle the data.
+    pass
+  def eval_input_fn(): # returns x, y
+    pass
+
+  train_spec = tf.estimator.TrainSpec(input_fn=train_input_fn, max_steps=1000)
+  eval_spec = tf.estimator.EvalSpec(input_fn=eval_input_fn)
+
+  tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
+  ```
+  Note that in the current implementation `estimator.evaluate` will be called
+  multiple times. This means that the evaluation graph (including the
+  eval_input_fn) will be re-created for each `evaluate` call. `estimator.train`
+  will be called only once.
+
+  Example of distributed training:
+
+  For distributed training, the code above can be used without change (please
+  do make sure that the `RunConfig.model_dir` for all workers is set to the
+  same directory, i.e., a shared file system all workers can read and write).
+  The only extra work to do is to set the environment variable `TF_CONFIG`
+  appropriately for each worker.
+
+  Also see
+  [Distributed TensorFlow](https://www.tensorflow.org/deploy/distributed).
+
+  How the environment variable is set depends on the platform. For example, on
+  Linux it can be done as follows (`$` is the shell prompt):
+
+  ```
+  $ TF_CONFIG='<replace_with_real_content>' python train_model.py
+  ```
+
+  For the content in `TF_CONFIG`, assume that the training cluster spec looks
+  like:
+
+  ```
+  cluster = {"chief": ["host0:2222"],
+             "worker": ["host1:2222", "host2:2222", "host3:2222"],
+             "ps": ["host4:2222", "host5:2222"]}
+  ```
+
+  Example of `TF_CONFIG` for chief training worker (must have one and only one):
+
+  ```
+  # This should be a JSON string, which is set as environment variable. Usually
+  # the cluster manager handles that.
+  TF_CONFIG='{
+      "cluster": {
+          "chief": ["host0:2222"],
+          "worker": ["host1:2222", "host2:2222", "host3:2222"],
+          "ps": ["host4:2222", "host5:2222"]
+      },
+      "task": {"type": "chief", "index": 0}
+  }'
+  ```
+  Note that the chief worker also does the model training job, similar to other
+  non-chief training workers (see next paragraph). In addition to the model
+  training, it manages some extra work, e.g., checkpoint saving and restoring,
+  writing summaries, etc.
+
+  Example of `TF_CONFIG` for non-chief training worker (optional, could be
+  multiple):
+
+  ```
+  # This should be a JSON string, which is set as environment variable. Usually
+  # the cluster manager handles that.
+  TF_CONFIG='{
+      "cluster": {
+          "chief": ["host0:2222"],
+          "worker": ["host1:2222", "host2:2222", "host3:2222"],
+          "ps": ["host4:2222", "host5:2222"]
+      },
+      "task": {"type": "worker", "index": 0}
+  }'
+  ```
+  where `task.index` should be set to 0, 1, and 2 respectively for the
+  non-chief training workers in this example.
+
+  Example of `TF_CONFIG` for parameter server, aka ps (could be multiple):
+
+  ```
+  # This should be a JSON string, which is set as environment variable. Usually
+  # the cluster manager handles that.
+  TF_CONFIG='{
+      "cluster": {
+          "chief": ["host0:2222"],
+          "worker": ["host1:2222", "host2:2222", "host3:2222"],
+          "ps": ["host4:2222", "host5:2222"]
+      },
+      "task": {"type": "ps", "index": 0}
+  }'
+  ```
+  where `task.index` should be set to 0 and 1 respectively for the parameter
+  servers in this example.
+
+  Example of `TF_CONFIG` for the evaluator task. The evaluator is a special
+  task that is not part of the training cluster; there can be only one, and it
+  is used for model evaluation.
+
+  ```
+  # This should be a JSON string, which is set as an environment variable.
+  # Usually the cluster manager handles that.
+  TF_CONFIG='{
+      "cluster": {
+          "chief": ["host0:2222"],
+          "worker": ["host1:2222", "host2:2222", "host3:2222"],
+          "ps": ["host4:2222", "host5:2222"]
+      },
+      "task": {"type": "evaluator", "index": 0}
+  }'
+  ```
+
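+  As a minimal illustration (a hypothetical launcher snippet, not part of
+  this API), the `TF_CONFIG` string for each task can be generated from the
+  cluster dict with the standard `json` module:
+
+  ```
+  import json
+  import os
+
+  cluster = {"chief": ["host0:2222"],
+             "worker": ["host1:2222", "host2:2222", "host3:2222"],
+             "ps": ["host4:2222", "host5:2222"]}
+
+  def make_tf_config(task_type, task_index):
+    # Serialize the cluster spec plus this task's assignment into the JSON
+    # string expected in the TF_CONFIG environment variable.
+    return json.dumps(
+        {"cluster": cluster, "task": {"type": task_type, "index": task_index}})
+
+  # For example, the second training worker would be launched with:
+  os.environ["TF_CONFIG"] = make_tf_config("worker", 1)
+  ```
+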
+  When `distribute` or `experimental_distribute.train_distribute` and
+  `experimental_distribute.remote_cluster` are set, this method will start a
+  client running on the current host, which connects to the `remote_cluster`
+  for training and evaluation.
+
+  Args:
+    estimator: An `Estimator` instance to train and evaluate.
+    train_spec: A `TrainSpec` instance to specify the training specification.
+    eval_spec: An `EvalSpec` instance to specify the evaluation and export
+      specification.
+
+  Returns:
+    A tuple of the result of the `evaluate` call to the `Estimator` and the
+    export results using the specified `Exporter`s.
+    Currently, the return value is undefined for distributed training mode.
+
+  Raises:
+    ValueError: if environment variable `TF_CONFIG` is incorrectly set.
+  """
+  _assert_eval_spec(eval_spec)  # fail fast if eval_spec is invalid.
+  estimator_lib._estimator_api_gauge.get_cell('train_and_evaluate').set(True)  # pylint: disable=protected-access
+
+  executor = _TrainingExecutor(
+      estimator=estimator, train_spec=train_spec, eval_spec=eval_spec)
+  config = estimator.config
+
+  # If `distribute_coordinator_mode` is set and running in distributed
+  # environment, we run `train_and_evaluate` via distribute coordinator.
+  if distribute_coordinator_training.should_run_distribute_coordinator(config):
+    tf.compat.v1.logging.info(
+        'Running `train_and_evaluate` with Distribute Coordinator.')
+    distribute_coordinator_training.train_and_evaluate(estimator, train_spec,
+                                                       eval_spec,
+                                                       _TrainingExecutor)
+    return
+
+  if (config.task_type == run_config_lib.TaskType.EVALUATOR and
+      config.task_id > 0):
+    raise ValueError(
+        'For distributed training, there can only be one `evaluator` task '
+        '(with task id 0).  Given task id {}'.format(config.task_id))
+
+  return executor.run()
+
+
+class _StopAtSecsHook(tf.compat.v1.train.SessionRunHook):
+  """Stops given secs after begin is called."""
+
+  def __init__(self, stop_after_secs):
+    self._stop_after_secs = stop_after_secs
+    self._start_time = None
+
+  def begin(self):
+    self._start_time = time.time()
+
+  def after_run(self, run_context, run_values):
+    del run_values
+    if time.time() - self._start_time >= self._stop_after_secs:
+      run_context.request_stop()
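+
+
+# A minimal usage sketch (assuming an `estimator` and a `train_input_fn`
+# exist, as in the `train_and_evaluate` docstring above): hooks of this kind
+# are attached through the `hooks` argument of `Estimator.train`, e.g.
+#
+#   estimator.train(
+#       input_fn=train_input_fn,
+#       hooks=[_StopAtSecsHook(stop_after_secs=600)])  # stop after ~10 min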
+
+
+class _NewCheckpointListenerForEvaluate(
+    tf.compat.v1.train.CheckpointSaverListener):
+  """A saver listener to run evaluate with every checkpoint."""
+
+  def __init__(self, evaluator, eval_throttle_secs, continuous_eval_listener):
+    self._evaluator = evaluator
+    self._eval_throttle_secs = eval_throttle_secs
+    self._continuous_eval_listener = continuous_eval_listener
+    self.eval_result, self.export_results = None, None
+
+  def begin(self):
+    self._timer = basic_session_run_hooks.SecondOrStepTimer(
+        every_secs=self._eval_throttle_secs)
+    self._is_first_run = True
+
+  def after_save(self, session, global_step_value):
+    del session  # unused; required by signature.
+    # Skip the first run; the model is not trained yet.
+    if self._is_first_run:
+      self._is_first_run = False
+      return
+
+    if not self._continuous_eval_listener.before_eval():
+      tf.compat.v1.logging.info(
+          'Exiting training and evaluation loop, as requested by '
+          '_ContinuousEvalListener.before_eval.')
+      return True
+    if self._timer.should_trigger_for_step(global_step_value):
+      self._evaluate(global_step_value)  # updates self.eval_result
+      if not self._continuous_eval_listener.after_eval(self.eval_result):
+        tf.compat.v1.logging.info('Exiting evaluation, as requested by '
+                                  '_ContinuousEvalListener.after_eval.')
+        return True
+    else:
+      # TODO(ispir): add remaining time in the log.
+      tf.compat.v1.logging.info(
+          'Skip the current checkpoint eval due to throttle secs '
+          '({} secs).'.format(self._eval_throttle_secs))
+
+  def end(self, session, global_step_value):
+    # Evaluate if the last step has not been evaluated yet.
+    if global_step_value != self._timer.last_triggered_step():
+      if self._continuous_eval_listener.before_eval():
+        self._evaluate(global_step_value)
+        self._continuous_eval_listener.after_eval(self.eval_result)
+
+  def _evaluate(self, global_step_value):
+    self._timer.update_last_triggered_step(global_step_value)
+    self.eval_result, self.export_results = (
+        self._evaluator.evaluate_and_export())
+    if self.eval_result.status != _EvalStatus.EVALUATED:
+      # This is unexpected; it should never happen.
+      # Training should always end with a new checkpoint.
+      raise RuntimeError('There was no new checkpoint after the training. '
+                         'Eval status: {}'.format(self.eval_result.status))
+
+
+class _TrainingExecutor(object):
+  """The executor to run `Estimator` training and evaluation.
+
+  This implementation supports both distributed and non-distributed (aka local)
+  training and evaluation based on the setting in `tf.estimator.RunConfig`.
+  """
+
+  def __init__(self,
+               estimator,
+               train_spec,
+               eval_spec,
+               train_hooks=None,
+               continuous_eval_listener=None):
+    if not isinstance(estimator,
+                      (estimator_lib.Estimator, estimator_lib.EstimatorV2)):
+      raise TypeError('`estimator` must have type `tf.estimator.Estimator`. '
+                      'Got: {}'.format(type(estimator)))
+    self._estimator = estimator
+
+    if not isinstance(train_spec, TrainSpec):
+      raise TypeError('`train_spec` must have type `tf.estimator.TrainSpec`. '
+                      'Got: {}'.format(type(train_spec)))
+    self._train_spec = train_spec
+
+    if eval_spec and not isinstance(eval_spec, EvalSpec):
+      raise TypeError('`eval_spec` must be either `None` or have type '
+                      '`tf.estimator.EvalSpec`. Got: {}'.format(
+                          type(eval_spec)))
+    self._eval_spec = eval_spec
+
+    self._train_hooks = _validate_hooks(train_hooks)
+
+    if (continuous_eval_listener and
+        not isinstance(continuous_eval_listener, _ContinuousEvalListener)):
+      raise TypeError('`continuous_eval_listener` must have type '
+                      '`_ContinuousEvalListener`.')
+    self._continuous_eval_listener = (
+        continuous_eval_listener or _ContinuousEvalListener())
+
+  @property
+  def estimator(self):
+    return self._estimator
+
+  def run(self):
+    """Executes the run_foo for task type `foo`.
+
+    `_TrainingExecutor` predefines the procedure for task types 'chief',
+    'worker', 'ps', and 'evaluator'. For task type `foo`, the corresponding
+    procedure is `run_foo`. This `run` method invokes the procedure based on
+    the `RunConfig.task_type`.
+
+    Returns:
+      A tuple of the result of the `evaluate` call to the `Estimator` and the
+      export results using the specified `ExportStrategy`.
+      Currently undefined for distributed training mode.
+
+    Raises:
+      ValueError: if `estimator.config` is misconfigured.
+    """
+    config = self._estimator.config
+
+    if (not config.cluster_spec and
+        config.task_type != run_config_lib.TaskType.EVALUATOR):
+      tf.compat.v1.logging.info(
+          'Running training and evaluation locally (non-distributed).')
+      return self.run_local()
+
+    # Distributed case.
+    if not config.task_type:
+      # TODO(xiejw): Improve the error message about how to set the TF_CONFIG
+      # correctly.
+      raise ValueError(
+          '`estimator.config` must have task_type set. This usually means '
+          'TF_CONFIG environment is not set correctly.')
+
+    if config.task_type == 'local':
+      raise ValueError(
+          '`task.type` in TF_CONFIG cannot be `local`. Leaving the `cluster` '
+          'and `task` properties absent from TF_CONFIG triggers training and '
+          'evaluating the `Estimator` locally (non-distributed).')
+
+    # For task type foo, call executor.run_foo.
+    available_tasks = [
+        x for x in dir(self) if x.startswith('run_') and x != 'run_local' and
+        callable(getattr(self, x))
+    ]
+    task_to_run = 'run_' + config.task_type
+    if task_to_run not in available_tasks:
+      raise ValueError(
+          'Task type {} is not supported. Supported task types are {}'.format(
+              config.task_type, [x[len('run_'):] for x in available_tasks]))
+    getattr(self, task_to_run)()
+
+  def run_chief(self):
+    """Runs task chief."""
+    # TODO(xiejw): To allow execution framework to add train hooks.
+    return self._start_distributed_training(
+        saving_listeners=self._train_spec.saving_listeners)
+
+  def run_worker(self):
+    """Runs task (training) worker."""
+    # TODO(xiejw): To allow execution framework to add train hooks.
+    return self._start_distributed_training()
+
+  def run_master(self):
+    """Runs task master."""
+    _assert_eval_spec(self._eval_spec)
+
+    # Final export signal: For any eval result with global_step >= train
+    # max_steps, the evaluator will send the final export signal. There is a
+    # small chance that the Estimator.train stopping logic sees a different
+    # global_step value (due to a global step race condition and the fact
+    # that the saver sees a larger value for checkpoint saving), which does
+    # not end the training. When the training ends, a new checkpoint is
+    # generated, which triggers the listener again. So, it could be the case
+    # that the final export is triggered twice.
+    #
+    # But here, throttle_secs will skip the next intermediate checkpoint and,
+    # so, the chance of a double final export is very small.
+    evaluator = _TrainingExecutor._Evaluator(self._estimator, self._eval_spec,
+                                             self._train_spec.max_steps)
+
+    # When the underlying `Estimator` object saves a new checkpoint, we would
+    # like this callback to be called so that evaluation and export can trigger.
+    saving_listeners = self._train_spec.saving_listeners + tuple(
+        [_NewCheckpointListenerForEvaluate(evaluator,
+                                           self._eval_spec.throttle_secs,
+                                           _ContinuousEvalListener())])
+    self._start_distributed_training(saving_listeners=saving_listeners)
+
+  def run_evaluator(self):
+    """Runs task evaluator."""
+    # TODO(xiejw): To allow execution framework to add continuous eval listener.
+    return self._start_continuous_evaluation()
+
+  def run_ps(self):
+    """Runs task parameter server (in training cluster spec)."""
+    config = self._estimator.config
+    server = self._start_std_server(config)
+    server.join()
+
+  def run_local(self):
+    """Runs training and evaluation locally (non-distributed)."""
+    _assert_eval_spec(self._eval_spec)
+
+    train_hooks = list(self._train_spec.hooks) + list(self._train_hooks)
+    tf.compat.v1.logging.info(
+        'Start train and evaluate loop. Evaluation will happen '
+        'after every checkpoint. Checkpoint frequency is determined '
+        'by the RunConfig arguments: save_checkpoints_steps {} or '
+        'save_checkpoints_secs {}.'.format(
+            self._estimator.config.save_checkpoints_steps,
+            self._estimator.config.save_checkpoints_secs))
+
+    evaluator = _TrainingExecutor._Evaluator(self._estimator, self._eval_spec,
+                                             self._train_spec.max_steps)
+
+    listener_for_eval = _NewCheckpointListenerForEvaluate(
+        evaluator, self._eval_spec.throttle_secs,
+        self._continuous_eval_listener)
+    saving_listeners = self._train_spec.saving_listeners + (listener_for_eval,)
+
+    self._estimator.train(
+        input_fn=self._train_spec.input_fn,
+        max_steps=self._train_spec.max_steps,
+        hooks=train_hooks,
+        saving_listeners=saving_listeners)
+
+    eval_result = listener_for_eval.eval_result or _EvalResult(
+        status=_EvalStatus.MISSING_CHECKPOINT)
+    return eval_result.metrics, listener_for_eval.export_results
+
+  def _start_std_server(self, config):
+    """Creates, starts, and returns a server_lib.Server."""
+    if (not config.cluster_spec or not config.task_type or
+        config.task_id is None):
+      raise RuntimeError('Could not start server; be sure to specify '
+                         'cluster_spec, task_type, and task_id in '
+                         'RunConfig or set the TF_CONFIG environment variable.')
+
+    if not config.master:
+      jobs = config.cluster_spec.jobs
+      if (len(jobs) == 1 and
+          len(config.cluster_spec.job_tasks(jobs[0])) == 1 and
+          config.task_type in _TRAINER_JOBS):
+        # For distributed training, config.master is empty if and only if it has
+        # a single node in the cluster spec. In this case, we should not start
+        # the server.
+        tf.compat.v1.logging.info(
+            'Skip starting TensorFlow server as there is only one '
+            'node in the cluster.')
+        return
+      else:
+        raise RuntimeError(
+            'Could not start server; be sure to specify master in '
+            'RunConfig or set the TF_CONFIG environment variable.')
+
+    tf.compat.v1.logging.info('Start TensorFlow server.')
+
+    if config.session_config is None:
+      session_config = config_pb2.ConfigProto(log_device_placement=False)
+    else:
+      session_config = config_pb2.ConfigProto(
+          log_device_placement=False,
+          gpu_options=config.session_config.gpu_options)
+
+    server = server_lib.Server(
+        config.cluster_spec,
+        job_name=config.task_type,
+        task_index=config.task_id,
+        config=session_config,
+        start=False,
+        protocol=config.protocol)
+    server.start()
+    return server
+
+  def _start_distributed_training(self, saving_listeners=None):
+    """Calls `Estimator` train in a distributed setting."""
+    config = self._estimator.config
+
+    # Start in-process TensorFlow server if needed. It's important to start the
+    # server before we (optionally) sleep. Otherwise, the servers will wait to
+    # connect to each other before starting to train.
+    if not _is_google_env():
+      self._start_std_server(config)
+
+    # Delay worker start. For asynchronous training, this usually helps the
+    # model converge faster. The chief starts training immediately, so a
+    # worker with task id x (0-based) should wait
+    # (x+1) * _DELAY_SECS_PER_WORKER seconds.
+    start_delay_secs = 0
+    if config.task_type == run_config_lib.TaskType.WORKER:
+      # TODO(xiejw): Replace the hard code logic (task_id + 1) with unique id in
+      # training cluster.
+
+      max_delay_secs = _MAX_DELAY_SECS
+      if config.experimental_max_worker_delay_secs is not None:
+        max_delay_secs = int(config.experimental_max_worker_delay_secs)
+
+      start_delay_secs = min(max_delay_secs,
+                             (config.task_id + 1) * _DELAY_SECS_PER_WORKER)
+    if start_delay_secs > 0:
+      tf.compat.v1.logging.info('Waiting %d secs before starting training.',
+                                start_delay_secs)
+      time.sleep(start_delay_secs)
+
+    self._estimator.train(
+        input_fn=self._train_spec.input_fn,
+        max_steps=self._train_spec.max_steps,
+        hooks=list(self._train_spec.hooks) + list(self._train_hooks),
+        saving_listeners=saving_listeners)
+
+  def _start_continuous_evaluation(self):
+    """Repeatedly calls `Estimator` evaluate and export until training ends."""
+
+    _assert_eval_spec(self._eval_spec)
+
+    start_delay_secs = self._eval_spec.start_delay_secs
+    if start_delay_secs:
+      tf.compat.v1.logging.info('Waiting %f secs before starting eval.',
+                                start_delay_secs)
+      time.sleep(start_delay_secs)
+
+    latest_eval_result = None
+    evaluator = _TrainingExecutor._Evaluator(self._estimator, self._eval_spec,
+                                             self._train_spec.max_steps)
+
+    should_early_stop = False
+    while not should_early_stop:
+      if (latest_eval_result and
+          latest_eval_result.status == _EvalStatus.EVALUATED):
+        global_step = latest_eval_result.metrics.get(
+            tf.compat.v1.GraphKeys.GLOBAL_STEP)
+        if (global_step and self._train_spec.max_steps and
+            global_step >= self._train_spec.max_steps):
+          logging.info(
+              'Exiting evaluation, global_step=%s >= train max_steps=%s',
+              global_step, self._train_spec.max_steps)
+          return
+
+      latest_eval_result, should_early_stop = self._execute_evaluator_once(
+          evaluator, self._continuous_eval_listener,
+          self._eval_spec.throttle_secs)
+
+  def _execute_evaluator_once(self, evaluator, continuous_eval_listener,
+                              throttle_secs):
+    """Executes the `evaluator`."""
+
+    _assert_eval_spec(self._eval_spec)
+
+    start = time.time()
+
+    eval_result = None
+    should_early_stop = False
+
+    if not continuous_eval_listener.before_eval():
+      tf.compat.v1.logging.info('Exiting evaluation, as requested by '
+                                '_ContinuousEvalListener.before_eval.')
+      should_early_stop = True
+      return (eval_result, should_early_stop)
+
+    # Final export signal: For any eval result with global_step >= train
+    # max_steps, the evaluator will send the final export signal. The next
+    # iteration of the while loop will end the continuous eval, as the
+    # stopping condition is satisfied (both checks use the same global_step
+    # value, i.e., there is no race condition).
+    eval_result, _ = evaluator.evaluate_and_export()
+
+    if not self._continuous_eval_listener.after_eval(eval_result):
+      tf.compat.v1.logging.info('Exiting evaluation, as requested by '
+                                '_ContinuousEvalListener.after_eval.')
+      should_early_stop = True
+      return (eval_result, should_early_stop)
+
+    # Throttle if necessary.
+    elapsed_time = time.time() - start
+    difference = throttle_secs - elapsed_time
+    if difference > 0:
+      logging.info('Waiting %f secs before starting next eval run.', difference)
+      time.sleep(difference)
+    elif (throttle_secs == 0 and eval_result.status != _EvalStatus.EVALUATED):
+      # Prints a user-actionable warning to avoid unnecessary load on evaluator.
+      logging.warning(
+          'EvalSpec.throttle_secs is set to 0. This might overload the job '
+          'before finding the (next) new checkpoint. Please consider '
+          'increasing it.')
+
+    return (eval_result, should_early_stop)
+
+  class _Evaluator(object):
+    """A helper class to call `Estimator.evaluate` and export model."""
+
+    def __init__(self, estimator, eval_spec, max_training_steps):
+      self._estimator = estimator
+
+      _assert_eval_spec(eval_spec)
+      self._eval_spec = eval_spec
+
+      self._is_final_export_triggered = False
+      self._previous_ckpt_path = None
+      self._last_warning_time = 0
+      self._max_training_steps = max_training_steps
+
+    @property
+    def is_final_export_triggered(self):
+      return self._is_final_export_triggered
+
+    def evaluate_and_export(self):
+      """Evaluate and (maybe) export the current model.
+
+      Returns:
+        A tuple of `EvalResult` instance and the export results.
+
+      Raises:
+        RuntimeError: for any unexpected internal error.
+        TypeError: if evaluation result has wrong type.
+      """
+      latest_ckpt_path = self._estimator.latest_checkpoint()
+      if not latest_ckpt_path:
+        self._log_err_msg('Estimator is not trained yet. Will start an '
+                          'evaluation when a checkpoint is ready.')
+        return _EvalResult(status=_EvalStatus.MISSING_CHECKPOINT), []
+
+      if latest_ckpt_path == self._previous_ckpt_path:
+        self._log_err_msg(
+            'No new checkpoint ready for evaluation. Skipping the current '
+            'evaluation pass, as evaluation results are expected to be the '
+            'same for the same checkpoint.')
+        return _EvalResult(status=_EvalStatus.NO_NEW_CHECKPOINT), []
+
+      metrics = self._estimator.evaluate(
+          input_fn=self._eval_spec.input_fn,
+          steps=self._eval_spec.steps,
+          name=self._eval_spec.name,
+          checkpoint_path=latest_ckpt_path,
+          hooks=self._eval_spec.hooks)
+
+      # _EvalResult validates the metrics.
+      eval_result = _EvalResult(
+          status=_EvalStatus.EVALUATED,
+          metrics=metrics,
+          checkpoint_path=latest_ckpt_path)
+
+      is_the_final_export = (
+          eval_result.metrics[tf.compat.v1.GraphKeys.GLOBAL_STEP] >=
+          self._max_training_steps if self._max_training_steps else False)
+      export_results = self._export_eval_result(eval_result,
+                                                is_the_final_export)
+
+      if is_the_final_export:
+        tf.compat.v1.logging.debug(
+            'Calling exporter with the `is_the_final_export=True`.')
+        self._is_final_export_triggered = True
+
+      self._last_warning_time = 0
+      self._previous_ckpt_path = latest_ckpt_path
+      return eval_result, export_results
+
+    def _log_err_msg(self, message):
+      """Prints warning `message` every 10 mins."""
+      current_time = time.time()
+      if current_time - self._last_warning_time > 600:
+        logging.warning(message)
+        self._last_warning_time = current_time
+
+    def _export_eval_result(self, eval_result, is_the_final_export):
+      """Export `eval_result` according to exporters in `EvalSpec`."""
+      export_dir_base = os.path.join(
+          tf.compat.as_str_any(self._estimator.model_dir),
+          tf.compat.as_str_any('export'))
+
+      export_results = []
+      for exporter in self._eval_spec.exporters:
+        export_results.append(
+            exporter.export(
+                estimator=self._estimator,
+                export_path=os.path.join(
+                    tf.compat.as_str_any(export_dir_base),
+                    tf.compat.as_str_any(exporter.name)),
+                checkpoint_path=eval_result.checkpoint_path,
+                eval_result=eval_result.metrics,
+                is_the_final_export=is_the_final_export))
+      return export_results
+
+
+class _EvalStatus(object):
+  """The status of an evaluation event.
+
+  For local training and evaluation, the status can only be `EVALUATED` as
+  `Estimator.train` always generates a new checkpoint.
+
+  For distributed training and evaluation, a separate evaluator keeps looking
+  for new checkpoints, so multiple situations might occur:
+
+  - EVALUATED: A new checkpoint is found since last evaluation.
+      `Estimator.evaluate` will be invoked.
+  - MISSING_CHECKPOINT: No checkpoint can be found. Typically, this means
+      the trainer has not yet produced any checkpoint.
+  - NO_NEW_CHECKPOINT: No new checkpoint can be found since last evaluation.
+      Typically, this means the trainer has not yet produced any new checkpoint.
+  """
+
+  EVALUATED = 'evaluated'
+  MISSING_CHECKPOINT = 'missing checkpoint'
+  NO_NEW_CHECKPOINT = 'no new checkpoint'
+
+
+class _EvalResult(
+    collections.namedtuple('EvalResult',
+                           ['status', 'metrics', 'checkpoint_path'])):
+  """_EvalResult holds the result of an evaluation event."""
+
+  def __new__(cls, status, metrics=None, checkpoint_path=None):
+    """Creates a validated `_EvalResult`.
+
+    Args:
+      status: See `_EvalStatus`.
+      metrics: The evaluation results returned by `Estimator.evaluate`. Only set
+        if status is `EVALUATED`.
+      checkpoint_path: The corresponding checkpoint path for the `metrics`. Only
+        set if status is `EVALUATED`.
+
+    Returns:
+      A validated `_EvalResult` object.
+
+    Raises:
+      ValueError: If validation fails.
+      TypeError: If any of the arguments is not the expected type.
+    """
+
+    if status != _EvalStatus.EVALUATED:
+      if metrics:
+        raise ValueError(
+            'metrics must be `None` if status is not {}; got status {},'
+            ' metrics {}'.format(_EvalStatus.EVALUATED, status, metrics))
+      if checkpoint_path:
+        raise ValueError(
+            'checkpoint must be `None` if status is not {}; got status {}, '
+            'checkpoint_path {}'.format(_EvalStatus.EVALUATED, status,
+                                        checkpoint_path))
+      return super(_EvalResult, cls).__new__(cls, status, metrics,
+                                             checkpoint_path)
+
+    # Now, evaluated case.
+    assert status == _EvalStatus.EVALUATED
+
+    # Validates metrics.
+    if not metrics:
+      raise ValueError(
+          'Internal error: `Estimator.evaluate` should never return empty '
+          'metrics.')
+    if not isinstance(metrics, dict):
+      raise TypeError(
+          '`Estimator.evaluate` should return dict. Given {}.'.format(
+              type(metrics)))
+    if tf.compat.v1.GraphKeys.GLOBAL_STEP not in metrics:
+      raise ValueError(
+          'Internal error: `Estimator.evaluate` result should have '
+          '`global_step` in result. Given {}'.format(metrics))
+
+    # Validates checkpoint_path.
+    if not checkpoint_path:
+      raise ValueError(
+          'Internal error: `checkpoint_path` should never be empty.')
+
+    return super(_EvalResult, cls).__new__(cls, status, metrics,
+                                           checkpoint_path)
+
+
+class _ContinuousEvalListener(object):
+  """Interface for listeners that take action before or after evaluation."""
+
+  def before_eval(self):
+    """Called before evaluation.
+
+    Returns:
+      `False` if you want to skip the current evaluation and stop the
+      continuous evaluation early; `True` otherwise.
+    """
+    return True
+
+  def after_eval(self, eval_result):
+    """Called after the evaluation is executed.
+
+    Args:
+      eval_result: An `_EvalResult` instance.
+
+    Returns:
+      `False` if you want to stop the continuous evaluation early; `True`
+      otherwise.
+    """
+    del eval_result
+    return True
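+
+
+# A minimal sketch of a custom listener (hypothetical, for illustration only):
+# stop the continuous evaluation after a fixed number of eval runs.
+#
+#   class _StopAfterNEvalsListener(_ContinuousEvalListener):
+#
+#     def __init__(self, max_evals):
+#       self._remaining = max_evals
+#
+#     def after_eval(self, eval_result):
+#       del eval_result
+#       self._remaining -= 1
+#       return self._remaining > 0  # returning False ends the eval loop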
+
+
+def _assert_eval_spec(eval_spec):
+  """Raise error if `eval_spec` is not of the right type."""
+  if not isinstance(eval_spec, EvalSpec):
+    raise TypeError('`eval_spec` must have type `tf.estimator.EvalSpec`. '
+                    'Got: {}'.format(type(eval_spec)))
diff --git a/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/util.py b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/util.py
new file mode 100644
index 00000000..c766feb1
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/util.py
@@ -0,0 +1,114 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Utilities for Estimators."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import time
+import tensorflow as tf
+from tensorflow.python.data.ops import dataset_ops
+from tensorflow.python.util import function_utils
+
+fn_args = function_utils.fn_args
+
+# When we create a timestamped directory, there is a small chance that the
+# directory already exists because another process is also creating these
+# directories. In this case we just wait one second to get a new timestamp and
+# try again. If this fails several times in a row, then something is seriously
+# wrong.
+MAX_DIRECTORY_CREATION_ATTEMPTS = 10
+
+
+def parse_input_fn_result(result):
+  """Gets features, labels, and hooks from the result of an Estimator input_fn.
+
+  Args:
+    result: output of an input_fn to an estimator, which should be one of:
+      * A `tf.data.Dataset` object: Outputs of the `Dataset` object must be a
+        tuple (features, labels) with the same constraints as below.
+      * A tuple (features, labels): Where `features` is a `Tensor` or a
+        dictionary of string feature name to `Tensor` and `labels` is a `Tensor`
+        or a dictionary of string label name to `Tensor`. Both `features` and
+        `labels` are consumed by `model_fn`. They should satisfy the expectation
+        of `model_fn` from inputs.
+
+  Returns:
+    Tuple of features, labels, and input_hooks, where features and labels are
+    as described above (labels may be None), and input_hooks is a list of
+    SessionRunHooks to be included when running.
+
+  Raises:
+    ValueError: if the result is a list or tuple of length != 2.
+  """
+  input_hooks = []
+  if isinstance(result, dataset_ops.DatasetV2):
+    iterator = dataset_ops.make_initializable_iterator(result)
+    input_hooks.append(_DatasetInitializerHook(iterator))
+    result = iterator.get_next()
+  return parse_iterator_result(result) + (input_hooks,)
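+
+
+# A minimal sketch of an `input_fn` whose result this function parses
+# (hypothetical; here `features` is a dict of Tensors and `labels` a Tensor):
+#
+#   def input_fn():
+#     ds = tf.data.Dataset.from_tensor_slices(
+#         ({'x': [[1.0], [2.0]]}, [0, 1]))
+#     return ds.batch(2)  # each element is a (features, labels) tuple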
+
+
+def parse_iterator_result(result):
+  """Gets features, labels from result."""
+  if isinstance(result, (list, tuple)):
+    if len(result) != 2:
+      raise ValueError(
+          'input_fn should return (features, labels) as a length-2 tuple.')
+    return result[0], result[1]
+  return result, None
+
+
+class _DatasetInitializerHook(tf.compat.v1.train.SessionRunHook):
+  """Creates a SessionRunHook that initializes the passed iterator."""
+
+  def __init__(self, iterator):
+    self._iterator = iterator
+
+  def begin(self):
+    self._initializer = self._iterator.initializer
+
+  def after_create_session(self, session, coord):
+    del coord
+    session.run(self._initializer)
+
+
+class DistributedIteratorInitializerHook(tf.compat.v1.train.SessionRunHook):
+  """Creates a SessionRunHook that initializes the passed iterator."""
+
+  def __init__(self, iterator):
+    self._iterator = iterator
+
+  def begin(self):
+    self._initializer = self._iterator.initialize()
+
+  def after_create_session(self, session, coord):
+    del coord
+    session.run(self._initializer)
+
+
+class MultiHostDatasetInitializerHook(tf.compat.v1.train.SessionRunHook):
+  """Creates a SessionRunHook that initializes all passed iterators."""
+
+  def __init__(self, dataset_initializers):
+    self._initializers = dataset_initializers
+
+  def after_create_session(self, session, coord):
+    del coord
+    start = time.time()
+    session.run(self._initializers)
+    tf.compat.v1.logging.info('Initialized dataset iterators in %d seconds',
+                              time.time() - start)
diff --git a/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/PKG-INFO b/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/PKG-INFO
new file mode 100644
index 00000000..7f7cf3bd
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/PKG-INFO
@@ -0,0 +1,133 @@
+Metadata-Version: 2.1
+Name: termcolor
+Version: 1.1.0
+Summary: ANSI color formatting for output in terminal.
+Home-page: http://pypi.python.org/pypi/termcolor
+Author: Konstantin Lepa
+Author-email: konstantin.lepa@gmail.com
+License: MIT
+Description: Example
+        =======
+            ::
+        
+                import sys
+                from termcolor import colored, cprint
+        
+                text = colored('Hello, World!', 'red', attrs=['reverse', 'blink'])
+                print(text)
+                cprint('Hello, World!', 'green', 'on_red')
+        
+                print_red_on_cyan = lambda x: cprint(x, 'red', 'on_cyan')
+                print_red_on_cyan('Hello, World!')
+                print_red_on_cyan('Hello, Universe!')
+        
+                for i in range(10):
+                    cprint(i, 'magenta', end=' ')
+        
+                cprint("Attention!", 'red', attrs=['bold'], file=sys.stderr)
+        
+        Text Properties
+        ===============
+        
+          Text colors:
+        
+              - grey
+              - red
+              - green
+              - yellow
+              - blue
+              - magenta
+              - cyan
+              - white
+        
+          Text highlights:
+        
+              - on_grey
+              - on_red
+              - on_green
+              - on_yellow
+              - on_blue
+              - on_magenta
+              - on_cyan
+              - on_white
+        
+          Attributes:
+        
+              - bold
+              - dark
+              - underline
+              - blink
+              - reverse
+              - concealed
+        
+        Terminal properties
+        ===================
+        
+            ============ ======= ==== ========= ========== ======= =========
+            Terminal     bold    dark underline blink      reverse concealed
+            ------------ ------- ---- --------- ---------- ------- ---------
+            xterm        yes     no   yes       bold       yes     yes
+            linux        yes     yes  bold      yes        yes     no
+            rxvt         yes     no   yes       bold/black yes     no
+            dtterm       yes     yes  yes       reverse    yes     yes
+            teraterm     reverse no   yes       rev/red    yes     no
+            aixterm      normal  no   yes       no         yes     yes
+            PuTTY        color   no   yes       no         yes     no
+            Windows      no      no   no        no         yes     no
+            Cygwin SSH   yes     no   color     color      color   yes
+            Mac Terminal yes     no   yes       yes        yes     yes
+            ============ ======= ==== ========= ========== ======= =========
+        
+        
+        CHANGES
+        =======
+        
+        1.1.0 (13.01.2011)
+        ------------------
+        
+        - Added cprint function.
+        
+        1.0.1 (13.01.2011)
+        ------------------
+        
+        - Updated README.rst.
+        
+        1.0.0 (13.01.2011)
+        ------------------
+        
+        - Changed license to MIT.
+        - Updated copyright.
+        - Refactored source code.
+        
+        0.2 (07.09.2010)
+        ----------------
+        
+        - Added support of Python 3.x.
+        
+        0.1.2 (04.06.2009)
+        ------------------
+        
+        - Fixed bold characters. (Thanks Tibor Fekete)
+        
+        0.1.1 (05.03.2009)
+        ------------------
+        
+        - Some refactoring.
+        - Updated copyright.
+        - Fixed reset colors.
+        - Updated documentation.
+        
+        0.1 (09.06.2008)
+        ----------------
+        
+        - Initial release.
+        
+        
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Terminals
diff --git a/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/SOURCES.txt b/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/SOURCES.txt
new file mode 100644
index 00000000..9dbba440
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/SOURCES.txt
@@ -0,0 +1,8 @@
+COPYING.txt
+README.rst
+setup.py
+termcolor.py
+termcolor.egg-info/PKG-INFO
+termcolor.egg-info/SOURCES.txt
+termcolor.egg-info/dependency_links.txt
+termcolor.egg-info/top_level.txt
\ No newline at end of file
diff --git a/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/dependency_links.txt b/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/dependency_links.txt
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/installed-files.txt b/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/installed-files.txt
new file mode 100644
index 00000000..870e3cde
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/installed-files.txt
@@ -0,0 +1,6 @@
+../__pycache__/termcolor.cpython-37.pyc
+../termcolor.py
+PKG-INFO
+SOURCES.txt
+dependency_links.txt
+top_level.txt
diff --git a/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/top_level.txt b/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/top_level.txt
new file mode 100644
index 00000000..f08cca14
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/termcolor-1.1.0-py3.7.egg-info/top_level.txt
@@ -0,0 +1 @@
+termcolor
diff --git a/venv/lib/python3.7/site-packages/termcolor.py b/venv/lib/python3.7/site-packages/termcolor.py
new file mode 100644
index 00000000..f11b824b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/termcolor.py
@@ -0,0 +1,168 @@
+# coding: utf-8
+# Copyright (c) 2008-2011 Volvox Development Team
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+# Author: Konstantin Lepa <konstantin.lepa@gmail.com>
+
+"""ANSII Color formatting for output in terminal."""
+
+from __future__ import print_function
+import os
+
+
+__all__ = ['colored', 'cprint']
+
+VERSION = (1, 1, 0)
+
+ATTRIBUTES = dict(
+        list(zip([
+            'bold',
+            'dark',
+            '',
+            'underline',
+            'blink',
+            '',
+            'reverse',
+            'concealed'
+            ],
+            list(range(1, 9))
+            ))
+        )
+del ATTRIBUTES['']
+
+
+HIGHLIGHTS = dict(
+        list(zip([
+            'on_grey',
+            'on_red',
+            'on_green',
+            'on_yellow',
+            'on_blue',
+            'on_magenta',
+            'on_cyan',
+            'on_white'
+            ],
+            list(range(40, 48))
+            ))
+        )
+
+
+COLORS = dict(
+        list(zip([
+            'grey',
+            'red',
+            'green',
+            'yellow',
+            'blue',
+            'magenta',
+            'cyan',
+            'white',
+            ],
+            list(range(30, 38))
+            ))
+        )
+
+
+RESET = '\033[0m'
+
+
+def colored(text, color=None, on_color=None, attrs=None):
+    """Colorize text.
+
+    Available text colors:
+        red, green, yellow, blue, magenta, cyan, white.
+
+    Available text highlights:
+        on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white.
+
+    Available attributes:
+        bold, dark, underline, blink, reverse, concealed.
+
+    Example:
+        colored('Hello, World!', 'red', 'on_grey', ['bold', 'blink'])
+        colored('Hello, World!', 'green')
+    """
+    if os.getenv('ANSI_COLORS_DISABLED') is None:
+        fmt_str = '\033[%dm%s'
+        if color is not None:
+            text = fmt_str % (COLORS[color], text)
+
+        if on_color is not None:
+            text = fmt_str % (HIGHLIGHTS[on_color], text)
+
+        if attrs is not None:
+            for attr in attrs:
+                text = fmt_str % (ATTRIBUTES[attr], text)
+
+        text += RESET
+    return text
+
+
+def cprint(text, color=None, on_color=None, attrs=None, **kwargs):
+    """Print colorize text.
+
+    It accepts arguments of print function.
+    """
+
+    print((colored(text, color, on_color, attrs)), **kwargs)
+
+
+if __name__ == '__main__':
+    print('Current terminal type: %s' % os.getenv('TERM'))
+    print('Test basic colors:')
+    cprint('Grey color', 'grey')
+    cprint('Red color', 'red')
+    cprint('Green color', 'green')
+    cprint('Yellow color', 'yellow')
+    cprint('Blue color', 'blue')
+    cprint('Magenta color', 'magenta')
+    cprint('Cyan color', 'cyan')
+    cprint('White color', 'white')
+    print(('-' * 78))
+
+    print('Test highlights:')
+    cprint('On grey color', on_color='on_grey')
+    cprint('On red color', on_color='on_red')
+    cprint('On green color', on_color='on_green')
+    cprint('On yellow color', on_color='on_yellow')
+    cprint('On blue color', on_color='on_blue')
+    cprint('On magenta color', on_color='on_magenta')
+    cprint('On cyan color', on_color='on_cyan')
+    cprint('On white color', color='grey', on_color='on_white')
+    print('-' * 78)
+
+    print('Test attributes:')
+    cprint('Bold grey color', 'grey', attrs=['bold'])
+    cprint('Dark red color', 'red', attrs=['dark'])
+    cprint('Underline green color', 'green', attrs=['underline'])
+    cprint('Blink yellow color', 'yellow', attrs=['blink'])
+    cprint('Reversed blue color', 'blue', attrs=['reverse'])
+    cprint('Concealed Magenta color', 'magenta', attrs=['concealed'])
+    cprint('Bold underline reverse cyan color', 'cyan',
+            attrs=['bold', 'underline', 'reverse'])
+    cprint('Dark blink concealed white color', 'white',
+            attrs=['dark', 'blink', 'concealed'])
+    print(('-' * 78))
+
+    print('Test mixing:')
+    cprint('Underline red on grey color', 'red', 'on_grey',
+            ['underline'])
+    cprint('Reversed green on red color', 'green', 'on_red', ['reverse'])
+
diff --git a/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/LICENSE b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/LICENSE
new file mode 100644
index 00000000..583f9f6e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/LICENSE
@@ -0,0 +1,254 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC.  Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com).  In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property.  Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition).  Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+    Release         Derived     Year        Owner       GPL-
+                    from                                compatible? (1)
+
+    0.9.0 thru 1.2              1991-1995   CWI         yes
+    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
+    1.6             1.5.2       2000        CNRI        no
+    2.0             1.6         2000        BeOpen.com  no
+    1.6.1           1.6         2001        CNRI        yes (2)
+    2.1             2.0+1.6.1   2001        PSF         no
+    2.0.1           2.0+1.6.1   2001        PSF         yes
+    2.1.1           2.1+2.0.1   2001        PSF         yes
+    2.1.2           2.1.1       2002        PSF         yes
+    2.1.3           2.1.2       2002        PSF         yes
+    2.2 and above   2.1.1       2001-now    PSF         yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+    the GPL.  All Python licenses, unlike the GPL, let you distribute
+    a modified version without making your changes open source.  The
+    GPL-compatible licenses make it possible to combine Python with
+    other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+    because its license has a choice of law clause.  According to
+    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+    is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
+retained in Python alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions.  Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee.  This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party.  As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee.  Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement.  This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013.  This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee.  This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+        ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands.  All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/METADATA b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/METADATA
new file mode 100644
index 00000000..ee6d0e72
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/METADATA
@@ -0,0 +1,43 @@
+Metadata-Version: 2.1
+Name: typing-extensions
+Version: 3.7.4.3
+Summary: Backported and Experimental Type Hints for Python 3.5+
+Home-page: https://github.com/python/typing/blob/master/typing_extensions/README.rst
+Author: Guido van Rossum, Jukka Lehtosalo, Lukasz Langa, Michael Lee
+Author-email: levkivskyi@gmail.com
+License: PSF
+Keywords: typing function annotations type hints hinting checking checker typehints typehinting typechecking backport
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Topic :: Software Development
+Requires-Dist: typing (>=3.7.4) ; python_version < "3.5"
+
+Typing Extensions -- Backported and Experimental Type Hints for Python
+
+The ``typing`` module was added to the standard library in Python 3.5 on
+a provisional basis and will no longer be provisional in Python 3.7. However,
+this means users of Python 3.5 - 3.6 who are unable to upgrade will not be
+able to take advantage of new types added to the ``typing`` module, such as
+``typing.Text`` or ``typing.Coroutine``.
+
+The ``typing_extensions`` module contains both backports of these changes
+as well as experimental types that will eventually be added to the ``typing``
+module, such as ``Protocol`` or ``TypedDict``.
+
+Users of other Python versions should continue to install and use
+the ``typing`` module from PyPI instead of using this one, unless they are
+specifically writing code that must be compatible with multiple Python
+versions or that requires experimental types.
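+
+A minimal usage sketch (assuming Python 3.7 with this backport installed;
+``Closable`` and ``Movie`` are illustrative names, not part of the package)::
+
+    from typing_extensions import Protocol, TypedDict
+
+    class Closable(Protocol):
+        def close(self) -> None: ...
+
+    class Movie(TypedDict):
+        title: str
+        year: int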
+
+
diff --git a/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/RECORD b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/RECORD
new file mode 100644
index 00000000..a5d318d7
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/RECORD
@@ -0,0 +1,8 @@
+__pycache__/typing_extensions.cpython-37.pyc,,
+typing_extensions-3.7.4.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+typing_extensions-3.7.4.3.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755
+typing_extensions-3.7.4.3.dist-info/METADATA,sha256=ZHHy0ceuecxgjj7eY65f-I_dN1m3u2BgGVbnc9lgg78,2020
+typing_extensions-3.7.4.3.dist-info/RECORD,,
+typing_extensions-3.7.4.3.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92
+typing_extensions-3.7.4.3.dist-info/top_level.txt,sha256=hkDmk3VmrfXPOD--jS4aKTCu6kFZo-kVT1cIFfq1eU8,18
+typing_extensions.py,sha256=MX7Db0Dze7Y5_V7HHFajAbf8IPuvw2ZS07o_sfpBOE8,83727
diff --git a/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/WHEEL b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/WHEEL
new file mode 100644
index 00000000..3b5c4038
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/top_level.txt
new file mode 100644
index 00000000..5fd4f05f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/typing_extensions-3.7.4.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+typing_extensions
diff --git a/venv/lib/python3.7/site-packages/typing_extensions.py b/venv/lib/python3.7/site-packages/typing_extensions.py
new file mode 100644
index 00000000..a6f4281b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/typing_extensions.py
@@ -0,0 +1,2168 @@
+import abc
+import collections
+import contextlib
+import sys
+import typing
+import collections.abc as collections_abc
+import operator
+
+# These are used by the Protocol implementation.
+# We use internal typing helpers here, but this significantly reduces
+# code duplication. (Also this is only until Protocol is in typing.)
+from typing import Generic, Callable, TypeVar, Tuple
+
+# After PEP 560, internal typing API was substantially reworked.
+# This is especially important for Protocol class which uses internal APIs
+# quite extensively.
+PEP_560 = sys.version_info[:3] >= (3, 7, 0)
+
+if PEP_560:
+    GenericMeta = TypingMeta = type
+else:
+    from typing import GenericMeta, TypingMeta
+OLD_GENERICS = False
+try:
+    from typing import _type_vars, _next_in_mro, _type_check
+except ImportError:
+    OLD_GENERICS = True
+try:
+    from typing import _subs_tree  # noqa
+    SUBS_TREE = True
+except ImportError:
+    SUBS_TREE = False
+try:
+    from typing import _tp_cache
+except ImportError:
+    def _tp_cache(x):
+        return x
+try:
+    from typing import _TypingEllipsis, _TypingEmpty
+except ImportError:
+    class _TypingEllipsis:
+        pass
+
+    class _TypingEmpty:
+        pass
+
+
+# The two functions below are copies of internal typing helpers.
+# They are needed by _ProtocolMeta.
+
+
+def _no_slots_copy(dct):
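+    """Return a copy of ``dct`` with the names listed in its ``__slots__``
+    removed, so slot descriptors are not carried over when the dict is
+    reused to build a new class object."""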
+    dict_copy = dict(dct)
+    if '__slots__' in dict_copy:
+        for slot in dict_copy['__slots__']:
+            dict_copy.pop(slot, None)
+    return dict_copy
+
+
+def _check_generic(cls, parameters):
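+    """Check that ``parameters`` matches the arity of the generic class
+    ``cls``; raise TypeError for a non-generic class or a count mismatch."""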
+    if not cls.__parameters__:
+        raise TypeError("%s is not a generic class" % repr(cls))
+    alen = len(parameters)
+    elen = len(cls.__parameters__)
+    if alen != elen:
+        raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
+                        ("many" if alen > elen else "few", repr(cls), alen, elen))
+
+
+if hasattr(typing, '_generic_new'):
+    _generic_new = typing._generic_new
+else:
+    # Note: The '_generic_new(...)' function is used as a part of the
+    # process of creating a generic type and was added to the typing module
+    # as of Python 3.5.3.
+    #
+    # We've defined '_generic_new(...)' below to exactly match the behavior
+    # implemented in older versions of 'typing' bundled with Python 3.5.0 to
+    # 3.5.2. This helps eliminate redundancy when defining collection types
+    # like 'Deque' later.
+    #
+    # See https://github.com/python/typing/pull/308 for more details -- in
+    # particular, compare and contrast the definition of types like
+    # 'typing.List' before and after the merge.
+
+    def _generic_new(base_cls, cls, *args, **kwargs):
+        return base_cls.__new__(cls, *args, **kwargs)
+
+# See https://github.com/python/typing/pull/439
+if hasattr(typing, '_geqv'):
+    from typing import _geqv
+    _geqv_defined = True
+else:
+    _geqv = None
+    _geqv_defined = False
+
+if sys.version_info[:2] >= (3, 6):
+    import _collections_abc
+    _check_methods_in_mro = _collections_abc._check_methods
+else:
+    def _check_methods_in_mro(C, *methods):
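+        """Pre-3.6 stand-in for _collections_abc._check_methods: return True
+        only if every name in ``methods`` is defined (and not set to None)
+        somewhere in C's MRO; otherwise return NotImplemented."""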
+        mro = C.__mro__
+        for method in methods:
+            for B in mro:
+                if method in B.__dict__:
+                    if B.__dict__[method] is None:
+                        return NotImplemented
+                    break
+            else:
+                return NotImplemented
+        return True
+
+
+# Please keep __all__ alphabetized within each category.
+__all__ = [
+    # Super-special typing primitives.
+    'ClassVar',
+    'Final',
+    'Type',
+
+    # ABCs (from collections.abc).
+    # The following are added depending on presence
+    # of their non-generic counterparts in stdlib:
+    # 'Awaitable',
+    # 'AsyncIterator',
+    # 'AsyncIterable',
+    # 'Coroutine',
+    # 'AsyncGenerator',
+    # 'AsyncContextManager',
+    # 'ChainMap',
+
+    # Concrete collection types.
+    'ContextManager',
+    'Counter',
+    'Deque',
+    'DefaultDict',
+    'TypedDict',
+
+    # Structural checks, a.k.a. protocols.
+    'SupportsIndex',
+
+    # One-off things.
+    'final',
+    'IntVar',
+    'Literal',
+    'NewType',
+    'overload',
+    'Text',
+    'TYPE_CHECKING',
+]
+
+# Annotated relies on the substitution trees of PEP 560. It will not work for
+# versions of typing older than 3.5.3.
+HAVE_ANNOTATED = PEP_560 or SUBS_TREE
+
+if PEP_560:
+    __all__.extend(["get_args", "get_origin", "get_type_hints"])
+
+if HAVE_ANNOTATED:
+    __all__.append("Annotated")
+
+# Protocols are hard to backport to the original version of typing 3.5.0
+HAVE_PROTOCOLS = sys.version_info[:3] != (3, 5, 0)
+
+if HAVE_PROTOCOLS:
+    __all__.extend(['Protocol', 'runtime', 'runtime_checkable'])
+
+
+# TODO
+if hasattr(typing, 'NoReturn'):
+    NoReturn = typing.NoReturn
+elif hasattr(typing, '_FinalTypingBase'):
+    class _NoReturn(typing._FinalTypingBase, _root=True):
+        """Special type indicating functions that never return.
+        Example::
+
+          from typing import NoReturn
+
+          def stop() -> NoReturn:
+              raise Exception('no way')
+
+        This type is invalid in other positions, e.g., ``List[NoReturn]``
+        will fail in static type checkers.
+        """
+        __slots__ = ()
+
+        def __instancecheck__(self, obj):
+            raise TypeError("NoReturn cannot be used with isinstance().")
+
+        def __subclasscheck__(self, cls):
+            raise TypeError("NoReturn cannot be used with issubclass().")
+
+    NoReturn = _NoReturn(_root=True)
+else:
+    class _NoReturnMeta(typing.TypingMeta):
+        """Metaclass for NoReturn"""
+        def __new__(cls, name, bases, namespace, _root=False):
+            return super().__new__(cls, name, bases, namespace, _root=_root)
+
+        def __instancecheck__(self, obj):
+            raise TypeError("NoReturn cannot be used with isinstance().")
+
+        def __subclasscheck__(self, cls):
+            raise TypeError("NoReturn cannot be used with issubclass().")
+
+    class NoReturn(typing.Final, metaclass=_NoReturnMeta, _root=True):
+        """Special type indicating functions that never return.
+        Example::
+
+          from typing import NoReturn
+
+          def stop() -> NoReturn:
+              raise Exception('no way')
+
+        This type is invalid in other positions, e.g., ``List[NoReturn]``
+        will fail in static type checkers.
+        """
+        __slots__ = ()
+
+
+# Some unconstrained type variables.  These are used by the container types.
+# (These are not for export.)
+T = typing.TypeVar('T')  # Any type.
+KT = typing.TypeVar('KT')  # Key type.
+VT = typing.TypeVar('VT')  # Value type.
+T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
+V_co = typing.TypeVar('V_co', covariant=True)  # Any type covariant containers.
+VT_co = typing.TypeVar('VT_co', covariant=True)  # Value type covariant containers.
+T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
+
+
+if hasattr(typing, 'ClassVar'):
+    ClassVar = typing.ClassVar
+elif hasattr(typing, '_FinalTypingBase'):
+    class _ClassVar(typing._FinalTypingBase, _root=True):
+        """Special type construct to mark class variables.
+
+        An annotation wrapped in ClassVar indicates that a given
+        attribute is intended to be used as a class variable and
+        should not be set on instances of that class. Usage::
+
+          class Starship:
+              stats: ClassVar[Dict[str, int]] = {} # class variable
+              damage: int = 10                     # instance variable
+
+        ClassVar accepts only types and cannot be further subscripted.
+
+        Note that ClassVar is not a class itself, and should not
+        be used with isinstance() or issubclass().
+        """
+
+        __slots__ = ('__type__',)
+
+        def __init__(self, tp=None, **kwds):
+            self.__type__ = tp
+
+        def __getitem__(self, item):
+            cls = type(self)
+            if self.__type__ is None:
+                return cls(typing._type_check(item,
+                           '{} accepts only single type.'.format(cls.__name__[1:])),
+                           _root=True)
+            raise TypeError('{} cannot be further subscripted'
+                            .format(cls.__name__[1:]))
+
+        def _eval_type(self, globalns, localns):
+            new_tp = typing._eval_type(self.__type__, globalns, localns)
+            if new_tp == self.__type__:
+                return self
+            return type(self)(new_tp, _root=True)
+
+        def __repr__(self):
+            r = super().__repr__()
+            if self.__type__ is not None:
+                r += '[{}]'.format(typing._type_repr(self.__type__))
+            return r
+
+        def __hash__(self):
+            return hash((type(self).__name__, self.__type__))
+
+        def __eq__(self, other):
+            if not isinstance(other, _ClassVar):
+                return NotImplemented
+            if self.__type__ is not None:
+                return self.__type__ == other.__type__
+            return self is other
+
+    ClassVar = _ClassVar(_root=True)
+else:
+    class _ClassVarMeta(typing.TypingMeta):
+        """Metaclass for ClassVar"""
+
+        def __new__(cls, name, bases, namespace, tp=None, _root=False):
+            self = super().__new__(cls, name, bases, namespace, _root=_root)
+            if tp is not None:
+                self.__type__ = tp
+            return self
+
+        def __instancecheck__(self, obj):
+            raise TypeError("ClassVar cannot be used with isinstance().")
+
+        def __subclasscheck__(self, cls):
+            raise TypeError("ClassVar cannot be used with issubclass().")
+
+        def __getitem__(self, item):
+            cls = type(self)
+            if self.__type__ is not None:
+                raise TypeError('{} cannot be further subscripted'
+                                .format(cls.__name__[1:]))
+
+            param = typing._type_check(
+                item,
+                '{} accepts only single type.'.format(cls.__name__[1:]))
+            return cls(self.__name__, self.__bases__,
+                       dict(self.__dict__), tp=param, _root=True)
+
+        def _eval_type(self, globalns, localns):
+            new_tp = typing._eval_type(self.__type__, globalns, localns)
+            if new_tp == self.__type__:
+                return self
+            return type(self)(self.__name__, self.__bases__,
+                              dict(self.__dict__), tp=self.__type__,
+                              _root=True)
+
+        def __repr__(self):
+            r = super().__repr__()
+            if self.__type__ is not None:
+                r += '[{}]'.format(typing._type_repr(self.__type__))
+            return r
+
+        def __hash__(self):
+            return hash((type(self).__name__, self.__type__))
+
+        def __eq__(self, other):
+            if not isinstance(other, ClassVar):
+                return NotImplemented
+            if self.__type__ is not None:
+                return self.__type__ == other.__type__
+            return self is other
+
+    class ClassVar(typing.Final, metaclass=_ClassVarMeta, _root=True):
+        """Special type construct to mark class variables.
+
+        An annotation wrapped in ClassVar indicates that a given
+        attribute is intended to be used as a class variable and
+        should not be set on instances of that class. Usage::
+
+          class Starship:
+              stats: ClassVar[Dict[str, int]] = {} # class variable
+              damage: int = 10                     # instance variable
+
+        ClassVar accepts only types and cannot be further subscripted.
+
+        Note that ClassVar is not a class itself, and should not
+        be used with isinstance() or issubclass().
+        """
+
+        __type__ = None
+
+# On older versions of typing there is an internal class named "Final".
+if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
+    Final = typing.Final
+elif sys.version_info[:2] >= (3, 7):
+    class _FinalForm(typing._SpecialForm, _root=True):
+
+        def __repr__(self):
+            return 'typing_extensions.' + self._name
+
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      '{} accepts only single type'.format(self._name))
+            return _GenericAlias(self, (item,))
+
+    Final = _FinalForm('Final',
+                       doc="""A special typing construct to indicate that a name
+                       cannot be re-assigned or overridden in a subclass.
+                       For example:
+
+                           MAX_SIZE: Final = 9000
+                           MAX_SIZE += 1  # Error reported by type checker
+
+                           class Connection:
+                               TIMEOUT: Final[int] = 10
+                           class FastConnector(Connection):
+                               TIMEOUT = 1  # Error reported by type checker
+
+                       There is no runtime checking of these properties.""")
+elif hasattr(typing, '_FinalTypingBase'):
+    class _Final(typing._FinalTypingBase, _root=True):
+        """A special typing construct to indicate that a name
+        cannot be re-assigned or overridden in a subclass.
+        For example:
+
+            MAX_SIZE: Final = 9000
+            MAX_SIZE += 1  # Error reported by type checker
+
+            class Connection:
+                TIMEOUT: Final[int] = 10
+            class FastConnector(Connection):
+                TIMEOUT = 1  # Error reported by type checker
+
+        There is no runtime checking of these properties.
+        """
+
+        __slots__ = ('__type__',)
+
+        def __init__(self, tp=None, **kwds):
+            self.__type__ = tp
+
+        def __getitem__(self, item):
+            cls = type(self)
+            if self.__type__ is None:
+                return cls(typing._type_check(item,
+                           '{} accepts only single type.'.format(cls.__name__[1:])),
+                           _root=True)
+            raise TypeError('{} cannot be further subscripted'
+                            .format(cls.__name__[1:]))
+
+        def _eval_type(self, globalns, localns):
+            new_tp = typing._eval_type(self.__type__, globalns, localns)
+            if new_tp == self.__type__:
+                return self
+            return type(self)(new_tp, _root=True)
+
+        def __repr__(self):
+            r = super().__repr__()
+            if self.__type__ is not None:
+                r += '[{}]'.format(typing._type_repr(self.__type__))
+            return r
+
+        def __hash__(self):
+            return hash((type(self).__name__, self.__type__))
+
+        def __eq__(self, other):
+            if not isinstance(other, _Final):
+                return NotImplemented
+            if self.__type__ is not None:
+                return self.__type__ == other.__type__
+            return self is other
+
+    Final = _Final(_root=True)
+else:
+    class _FinalMeta(typing.TypingMeta):
+        """Metaclass for Final"""
+
+        def __new__(cls, name, bases, namespace, tp=None, _root=False):
+            self = super().__new__(cls, name, bases, namespace, _root=_root)
+            if tp is not None:
+                self.__type__ = tp
+            return self
+
+        def __instancecheck__(self, obj):
+            raise TypeError("Final cannot be used with isinstance().")
+
+        def __subclasscheck__(self, cls):
+            raise TypeError("Final cannot be used with issubclass().")
+
+        def __getitem__(self, item):
+            cls = type(self)
+            if self.__type__ is not None:
+                raise TypeError('{} cannot be further subscripted'
+                                .format(cls.__name__[1:]))
+
+            param = typing._type_check(
+                item,
+                '{} accepts only single type.'.format(cls.__name__[1:]))
+            return cls(self.__name__, self.__bases__,
+                       dict(self.__dict__), tp=param, _root=True)
+
+        def _eval_type(self, globalns, localns):
+            new_tp = typing._eval_type(self.__type__, globalns, localns)
+            if new_tp == self.__type__:
+                return self
+            return type(self)(self.__name__, self.__bases__,
+                              dict(self.__dict__), tp=self.__type__,
+                              _root=True)
+
+        def __repr__(self):
+            r = super().__repr__()
+            if self.__type__ is not None:
+                r += '[{}]'.format(typing._type_repr(self.__type__))
+            return r
+
+        def __hash__(self):
+            return hash((type(self).__name__, self.__type__))
+
+        def __eq__(self, other):
+            if not isinstance(other, Final):
+                return NotImplemented
+            if self.__type__ is not None:
+                return self.__type__ == other.__type__
+            return self is other
+
+    class Final(typing.Final, metaclass=_FinalMeta, _root=True):
+        """A special typing construct to indicate that a name
+        cannot be re-assigned or overridden in a subclass.
+        For example:
+
+            MAX_SIZE: Final = 9000
+            MAX_SIZE += 1  # Error reported by type checker
+
+            class Connection:
+                TIMEOUT: Final[int] = 10
+            class FastConnector(Connection):
+                TIMEOUT = 1  # Error reported by type checker
+
+        There is no runtime checking of these properties.
+        """
+
+        __type__ = None
+
+
+if hasattr(typing, 'final'):
+    final = typing.final
+else:
+    def final(f):
+        """This decorator can be used to indicate to type checkers that
+        the decorated method cannot be overridden, and decorated class
+        cannot be subclassed. For example:
+
+            class Base:
+                @final
+                def done(self) -> None:
+                    ...
+            class Sub(Base):
+                def done(self) -> None:  # Error reported by type checker
+                    ...
+            @final
+            class Leaf:
+                ...
+            class Other(Leaf):  # Error reported by type checker
+                ...
+
+        There is no runtime checking of these properties.
+        """
+        return f
+
+
+def IntVar(name):
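+    """At runtime this is simply an alias for TypeVar(name)."""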
+    return TypeVar(name)
+
+
+if hasattr(typing, 'Literal'):
+    Literal = typing.Literal
+elif sys.version_info[:2] >= (3, 7):
+    class _LiteralForm(typing._SpecialForm, _root=True):
+
+        def __repr__(self):
+            return 'typing_extensions.' + self._name
+
+        def __getitem__(self, parameters):
+            return _GenericAlias(self, parameters)
+
+    Literal = _LiteralForm('Literal',
+                           doc="""A type that can be used to indicate to type checkers
+                           that the corresponding value has a value literally equivalent
+                           to the provided parameter. For example:
+
+                               var: Literal[4] = 4
+
+                           The type checker understands that 'var' is literally equal to
+                           the value 4 and no other value.
+
+                           Literal[...] cannot be subclassed. There is no runtime
+                           checking verifying that the parameter is actually a value
+                           instead of a type.""")
+elif hasattr(typing, '_FinalTypingBase'):
+    class _Literal(typing._FinalTypingBase, _root=True):
+        """A type that can be used to indicate to type checkers that the
+        corresponding value has a value literally equivalent to the
+        provided parameter. For example:
+
+            var: Literal[4] = 4
+
+        The type checker understands that 'var' is literally equal to the
+        value 4 and no other value.
+
+        Literal[...] cannot be subclassed. There is no runtime checking
+        verifying that the parameter is actually a value instead of a type.
+        """
+
+        __slots__ = ('__values__',)
+
+        def __init__(self, values=None, **kwds):
+            self.__values__ = values
+
+        def __getitem__(self, values):
+            cls = type(self)
+            if self.__values__ is None:
+                if not isinstance(values, tuple):
+                    values = (values,)
+                return cls(values, _root=True)
+            raise TypeError('{} cannot be further subscripted'
+                            .format(cls.__name__[1:]))
+
+        def _eval_type(self, globalns, localns):
+            return self
+
+        def __repr__(self):
+            r = super().__repr__()
+            if self.__values__ is not None:
+                r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__)))
+            return r
+
+        def __hash__(self):
+            return hash((type(self).__name__, self.__values__))
+
+        def __eq__(self, other):
+            if not isinstance(other, _Literal):
+                return NotImplemented
+            if self.__values__ is not None:
+                return self.__values__ == other.__values__
+            return self is other
+
+    Literal = _Literal(_root=True)
+else:
+    class _LiteralMeta(typing.TypingMeta):
+        """Metaclass for Literal"""
+
+        def __new__(cls, name, bases, namespace, values=None, _root=False):
+            self = super().__new__(cls, name, bases, namespace, _root=_root)
+            if values is not None:
+                self.__values__ = values
+            return self
+
+        def __instancecheck__(self, obj):
+            raise TypeError("Literal cannot be used with isinstance().")
+
+        def __subclasscheck__(self, cls):
+            raise TypeError("Literal cannot be used with issubclass().")
+
+        def __getitem__(self, item):
+            cls = type(self)
+            if self.__values__ is not None:
+                raise TypeError('{} cannot be further subscripted'
+                                .format(cls.__name__[1:]))
+
+            if not isinstance(item, tuple):
+                item = (item,)
+            return cls(self.__name__, self.__bases__,
+                       dict(self.__dict__), values=item, _root=True)
+
+        def _eval_type(self, globalns, localns):
+            return self
+
+        def __repr__(self):
+            r = super().__repr__()
+            if self.__values__ is not None:
+                r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__)))
+            return r
+
+        def __hash__(self):
+            return hash((type(self).__name__, self.__values__))
+
+        def __eq__(self, other):
+            if not isinstance(other, Literal):
+                return NotImplemented
+            if self.__values__ is not None:
+                return self.__values__ == other.__values__
+            return self is other
+
+    class Literal(typing.Final, metaclass=_LiteralMeta, _root=True):
+        """A type that can be used to indicate to type checkers that the
+        corresponding value has a value literally equivalent to the
+        provided parameter. For example:
+
+            var: Literal[4] = 4
+
+        The type checker understands that 'var' is literally equal to the
+        value 4 and no other value.
+
+        Literal[...] cannot be subclassed. There is no runtime checking
+        verifying that the parameter is actually a value instead of a type.
+        """
+
+        __values__ = None
+
+
+def _overload_dummy(*args, **kwds):
+    """Helper for @overload to raise when called."""
+    raise NotImplementedError(
+        "You should not call an overloaded function. "
+        "A series of @overload-decorated functions "
+        "outside a stub module should always be followed "
+        "by an implementation that is not @overload-ed.")
+
+
+def overload(func):
+    """Decorator for overloaded functions/methods.
+
+    In a stub file, place two or more stub definitions for the same
+    function in a row, each decorated with @overload.  For example:
+
+      @overload
+      def utf8(value: None) -> None: ...
+      @overload
+      def utf8(value: bytes) -> bytes: ...
+      @overload
+      def utf8(value: str) -> bytes: ...
+
+    In a non-stub file (i.e. a regular .py file), do the same but
+    follow it with an implementation.  The implementation should *not*
+    be decorated with @overload.  For example:
+
+      @overload
+      def utf8(value: None) -> None: ...
+      @overload
+      def utf8(value: bytes) -> bytes: ...
+      @overload
+      def utf8(value: str) -> bytes: ...
+      def utf8(value):
+          # implementation goes here
+    """
+    return _overload_dummy
+
+
+# This is not a real generic class.  Don't use outside annotations.
+if hasattr(typing, 'Type'):
+    Type = typing.Type
+else:
+    # Internal type variable used for Type[].
+    CT_co = typing.TypeVar('CT_co', covariant=True, bound=type)
+
+    class Type(typing.Generic[CT_co], extra=type):
+        """A special construct usable to annotate class objects.
+
+        For example, suppose we have the following classes::
+
+          class User: ...  # Abstract base for User classes
+          class BasicUser(User): ...
+          class ProUser(User): ...
+          class TeamUser(User): ...
+
+        And a function that takes a class argument that's a subclass of
+        User and returns an instance of the corresponding class::
+
+          U = TypeVar('U', bound=User)
+          def new_user(user_class: Type[U]) -> U:
+              user = user_class()
+              # (Here we could write the user object to a database)
+              return user
+          joe = new_user(BasicUser)
+
+        At this point the type checker knows that joe has type BasicUser.
+        """
+
+        __slots__ = ()
+
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+
+def _define_guard(type_name):
+    """
+    Returns True if the given type isn't defined in typing but
+    is defined in collections_abc.
+
+    Adds the type to __all__ if it is found in either
+    typing or collections_abc.
+    """
+    if hasattr(typing, type_name):
+        __all__.append(type_name)
+        globals()[type_name] = getattr(typing, type_name)
+        return False
+    elif hasattr(collections_abc, type_name):
+        __all__.append(type_name)
+        return True
+    else:
+        return False
+
+
+class _ExtensionsGenericMeta(GenericMeta):
+    def __subclasscheck__(self, subclass):
+        """This mimics a more modern GenericMeta.__subclasscheck__() logic
+        (that does not have problems with recursion) to work around interactions
+        between collections, typing, and typing_extensions on older
+        versions of Python, see https://github.com/python/typing/issues/501.
+        """
+        if sys.version_info[:3] >= (3, 5, 3) or sys.version_info[:3] < (3, 5, 0):
+            if self.__origin__ is not None:
+                if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
+                    raise TypeError("Parameterized generics cannot be used with class "
+                                    "or instance checks")
+                return False
+        if not self.__extra__:
+            return super().__subclasscheck__(subclass)
+        res = self.__extra__.__subclasshook__(subclass)
+        if res is not NotImplemented:
+            return res
+        if self.__extra__ in subclass.__mro__:
+            return True
+        for scls in self.__extra__.__subclasses__():
+            if isinstance(scls, GenericMeta):
+                continue
+            if issubclass(subclass, scls):
+                return True
+        return False
+
+
+if _define_guard('Awaitable'):
+    class Awaitable(typing.Generic[T_co], metaclass=_ExtensionsGenericMeta,
+                    extra=collections_abc.Awaitable):
+        __slots__ = ()
+
+
+if _define_guard('Coroutine'):
+    class Coroutine(Awaitable[V_co], typing.Generic[T_co, T_contra, V_co],
+                    metaclass=_ExtensionsGenericMeta,
+                    extra=collections_abc.Coroutine):
+        __slots__ = ()
+
+
+if _define_guard('AsyncIterable'):
+    class AsyncIterable(typing.Generic[T_co],
+                        metaclass=_ExtensionsGenericMeta,
+                        extra=collections_abc.AsyncIterable):
+        __slots__ = ()
+
+
+if _define_guard('AsyncIterator'):
+    class AsyncIterator(AsyncIterable[T_co],
+                        metaclass=_ExtensionsGenericMeta,
+                        extra=collections_abc.AsyncIterator):
+        __slots__ = ()
+
+
+if hasattr(typing, 'Deque'):
+    Deque = typing.Deque
+elif _geqv_defined:
+    class Deque(collections.deque, typing.MutableSequence[T],
+                metaclass=_ExtensionsGenericMeta,
+                extra=collections.deque):
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwds):
+            if _geqv(cls, Deque):
+                return collections.deque(*args, **kwds)
+            return _generic_new(collections.deque, cls, *args, **kwds)
+else:
+    class Deque(collections.deque, typing.MutableSequence[T],
+                metaclass=_ExtensionsGenericMeta,
+                extra=collections.deque):
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwds):
+            if cls._gorg is Deque:
+                return collections.deque(*args, **kwds)
+            return _generic_new(collections.deque, cls, *args, **kwds)
+
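+# Illustrative usage (not part of the upstream module): Deque serves both as
+# an annotation and, at runtime, as a constructor for a plain
+# collections.deque:
+#
+#     def last(xs: Deque[int]) -> int:
+#         return xs[-1]
+#
+#     d = Deque([1, 2, 3])  # isinstance(d, collections.deque) is True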
+
+if hasattr(typing, 'ContextManager'):
+    ContextManager = typing.ContextManager
+elif hasattr(contextlib, 'AbstractContextManager'):
+    class ContextManager(typing.Generic[T_co],
+                         metaclass=_ExtensionsGenericMeta,
+                         extra=contextlib.AbstractContextManager):
+        __slots__ = ()
+else:
+    class ContextManager(typing.Generic[T_co]):
+        __slots__ = ()
+
+        def __enter__(self):
+            return self
+
+        @abc.abstractmethod
+        def __exit__(self, exc_type, exc_value, traceback):
+            return None
+
+        @classmethod
+        def __subclasshook__(cls, C):
+            if cls is ContextManager:
+                # In Python 3.6+, it is possible to set a method to None to
+                # explicitly indicate that the class does not implement an ABC
+                # (https://bugs.python.org/issue25958), but we do not support
+                # that pattern here because this fallback class is only used
+                # in Python 3.5 and earlier.
+                if (any("__enter__" in B.__dict__ for B in C.__mro__) and
+                    any("__exit__" in B.__dict__ for B in C.__mro__)):
+                    return True
+            return NotImplemented
+
+
+if hasattr(typing, 'AsyncContextManager'):
+    AsyncContextManager = typing.AsyncContextManager
+    __all__.append('AsyncContextManager')
+elif hasattr(contextlib, 'AbstractAsyncContextManager'):
+    class AsyncContextManager(typing.Generic[T_co],
+                              metaclass=_ExtensionsGenericMeta,
+                              extra=contextlib.AbstractAsyncContextManager):
+        __slots__ = ()
+
+    __all__.append('AsyncContextManager')
+elif sys.version_info[:2] >= (3, 5):
+    exec("""
+class AsyncContextManager(typing.Generic[T_co]):
+    __slots__ = ()
+
+    async def __aenter__(self):
+        return self
+
+    @abc.abstractmethod
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        return None
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        if cls is AsyncContextManager:
+            return _check_methods_in_mro(C, "__aenter__", "__aexit__")
+        return NotImplemented
+
+__all__.append('AsyncContextManager')
+""")
+
+
+if hasattr(typing, 'DefaultDict'):
+    DefaultDict = typing.DefaultDict
+elif _geqv_defined:
+    class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT],
+                      metaclass=_ExtensionsGenericMeta,
+                      extra=collections.defaultdict):
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwds):
+            if _geqv(cls, DefaultDict):
+                return collections.defaultdict(*args, **kwds)
+            return _generic_new(collections.defaultdict, cls, *args, **kwds)
+else:
+    class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT],
+                      metaclass=_ExtensionsGenericMeta,
+                      extra=collections.defaultdict):
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwds):
+            if cls._gorg is DefaultDict:
+                return collections.defaultdict(*args, **kwds)
+            return _generic_new(collections.defaultdict, cls, *args, **kwds)
+
+
+if hasattr(typing, 'Counter'):
+    Counter = typing.Counter
+elif (3, 5, 0) <= sys.version_info[:3] <= (3, 5, 1):
+    assert _geqv_defined
+    _TInt = typing.TypeVar('_TInt')
+
+    class _CounterMeta(typing.GenericMeta):
+        """Metaclass for Counter"""
+        def __getitem__(self, item):
+            return super().__getitem__((item, int))
+
+    class Counter(collections.Counter,
+                  typing.Dict[T, int],
+                  metaclass=_CounterMeta,
+                  extra=collections.Counter):
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwds):
+            if _geqv(cls, Counter):
+                return collections.Counter(*args, **kwds)
+            return _generic_new(collections.Counter, cls, *args, **kwds)
+
+elif _geqv_defined:
+    class Counter(collections.Counter,
+                  typing.Dict[T, int],
+                  metaclass=_ExtensionsGenericMeta, extra=collections.Counter):
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwds):
+            if _geqv(cls, Counter):
+                return collections.Counter(*args, **kwds)
+            return _generic_new(collections.Counter, cls, *args, **kwds)
+
+else:
+    class Counter(collections.Counter,
+                  typing.Dict[T, int],
+                  metaclass=_ExtensionsGenericMeta, extra=collections.Counter):
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwds):
+            if cls._gorg is Counter:
+                return collections.Counter(*args, **kwds)
+            return _generic_new(collections.Counter, cls, *args, **kwds)
+
+
+if hasattr(typing, 'ChainMap'):
+    ChainMap = typing.ChainMap
+    __all__.append('ChainMap')
+elif hasattr(collections, 'ChainMap'):
+    # ChainMap only exists in 3.3+
+    if _geqv_defined:
+        class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
+                       metaclass=_ExtensionsGenericMeta,
+                       extra=collections.ChainMap):
+
+            __slots__ = ()
+
+            def __new__(cls, *args, **kwds):
+                if _geqv(cls, ChainMap):
+                    return collections.ChainMap(*args, **kwds)
+                return _generic_new(collections.ChainMap, cls, *args, **kwds)
+    else:
+        class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
+                       metaclass=_ExtensionsGenericMeta,
+                       extra=collections.ChainMap):
+
+            __slots__ = ()
+
+            def __new__(cls, *args, **kwds):
+                if cls._gorg is ChainMap:
+                    return collections.ChainMap(*args, **kwds)
+                return _generic_new(collections.ChainMap, cls, *args, **kwds)
+
+    __all__.append('ChainMap')
+
+
+if _define_guard('AsyncGenerator'):
+    class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra],
+                         metaclass=_ExtensionsGenericMeta,
+                         extra=collections_abc.AsyncGenerator):
+        __slots__ = ()
+
+
+if hasattr(typing, 'NewType'):
+    NewType = typing.NewType
+else:
+    def NewType(name, tp):
+        """NewType creates simple unique types with almost zero
+        runtime overhead. NewType(name, tp) is considered a subtype of tp
+        by static type checkers. At runtime, NewType(name, tp) returns
+        a dummy function that simply returns its argument. Usage::
+
+            UserId = NewType('UserId', int)
+
+            def name_by_id(user_id: UserId) -> str:
+                ...
+
+            UserId('user')          # Fails type check
+
+            name_by_id(42)          # Fails type check
+            name_by_id(UserId(42))  # OK
+
+            num = UserId(5) + 1     # type: int
+        """
+
+        def new_type(x):
+            return x
+
+        new_type.__name__ = name
+        new_type.__supertype__ = tp
+        return new_type
+
+
+if hasattr(typing, 'Text'):
+    Text = typing.Text
+else:
+    Text = str
+
+
+if hasattr(typing, 'TYPE_CHECKING'):
+    TYPE_CHECKING = typing.TYPE_CHECKING
+else:
+    # Constant that's True when type checking, but False here.
+    TYPE_CHECKING = False
+
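+# Typical guard pattern (illustrative; ``heavy_module`` is hypothetical):
+#
+#     if TYPE_CHECKING:
+#         from heavy_module import HeavyType  # imported only by type checkers
+#
+#     def process(x: "HeavyType") -> None: ...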
+
+def _gorg(cls):
+    """This function exists for compatibility with old typing versions."""
+    assert isinstance(cls, GenericMeta)
+    if hasattr(cls, '_gorg'):
+        return cls._gorg
+    while cls.__origin__ is not None:
+        cls = cls.__origin__
+    return cls
+
+
+if OLD_GENERICS:
+    def _next_in_mro(cls):  # noqa
+        """This function exists for compatibility with old typing versions."""
+        next_in_mro = object
+        for i, c in enumerate(cls.__mro__[:-1]):
+            if isinstance(c, GenericMeta) and _gorg(c) is Generic:
+                next_in_mro = cls.__mro__[i + 1]
+        return next_in_mro
+
+
+_PROTO_WHITELIST = ['Callable', 'Awaitable',
+                    'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator',
+                    'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
+                    'ContextManager', 'AsyncContextManager']
+
+
+def _get_protocol_attrs(cls):
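+    """Collect the attribute names a protocol class declares (methods and
+    annotated members), excluding typing/ABC implementation attributes."""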
+    attrs = set()
+    for base in cls.__mro__[:-1]:  # without object
+        if base.__name__ in ('Protocol', 'Generic'):
+            continue
+        annotations = getattr(base, '__annotations__', {})
+        for attr in list(base.__dict__.keys()) + list(annotations.keys()):
+            if (not attr.startswith('_abc_') and attr not in (
+                    '__abstractmethods__', '__annotations__', '__weakref__',
+                    '_is_protocol', '_is_runtime_protocol', '__dict__',
+                    '__args__', '__slots__',
+                    '__next_in_mro__', '__parameters__', '__origin__',
+                    '__orig_bases__', '__extra__', '__tree_hash__',
+                    '__doc__', '__subclasshook__', '__init__', '__new__',
+                    '__module__', '_MutableMapping__marker', '_gorg')):
+                attrs.add(attr)
+    return attrs
+
+
+def _is_callable_members_only(cls):
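+    # A protocol supports issubclass() only when all its members are methods;
+    # data members cannot be verified on a class object.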
+    return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
+
+
+if hasattr(typing, 'Protocol'):
+    Protocol = typing.Protocol
+elif HAVE_PROTOCOLS and not PEP_560:
+    class _ProtocolMeta(GenericMeta):
+        """Internal metaclass for Protocol.
+
+        This exists so Protocol classes can be generic without deriving
+        from Generic.
+        """
+        if not OLD_GENERICS:
+            def __new__(cls, name, bases, namespace,
+                        tvars=None, args=None, origin=None, extra=None, orig_bases=None):
+                # This is just a version copied from GenericMeta.__new__ that
+                # includes "Protocol" special treatment. (Comments removed for brevity.)
+                assert extra is None  # Protocols should not have extra
+                if tvars is not None:
+                    assert origin is not None
+                    assert all(isinstance(t, TypeVar) for t in tvars), tvars
+                else:
+                    tvars = _type_vars(bases)
+                    gvars = None
+                    for base in bases:
+                        if base is Generic:
+                            raise TypeError("Cannot inherit from plain Generic")
+                        if (isinstance(base, GenericMeta) and
+                                base.__origin__ in (Generic, Protocol)):
+                            if gvars is not None:
+                                raise TypeError(
+                                    "Cannot inherit from Generic[...] or"
+                                    " Protocol[...] multiple times.")
+                            gvars = base.__parameters__
+                    if gvars is None:
+                        gvars = tvars
+                    else:
+                        tvarset = set(tvars)
+                        gvarset = set(gvars)
+                        if not tvarset <= gvarset:
+                            raise TypeError(
+                                "Some type variables (%s) "
+                                "are not listed in %s[%s]" %
+                                (", ".join(str(t) for t in tvars if t not in gvarset),
+                                 "Generic" if any(b.__origin__ is Generic
+                                                  for b in bases) else "Protocol",
+                                 ", ".join(str(g) for g in gvars)))
+                        tvars = gvars
+
+                initial_bases = bases
+                if (extra is not None and type(extra) is abc.ABCMeta and
+                        extra not in bases):
+                    bases = (extra,) + bases
+                bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b
+                              for b in bases)
+                if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
+                    bases = tuple(b for b in bases if b is not Generic)
+                namespace.update({'__origin__': origin, '__extra__': extra})
+                self = super(GenericMeta, cls).__new__(cls, name, bases, namespace,
+                                                       _root=True)
+                super(GenericMeta, self).__setattr__('_gorg',
+                                                     self if not origin else
+                                                     _gorg(origin))
+                self.__parameters__ = tvars
+                self.__args__ = tuple(... if a is _TypingEllipsis else
+                                      () if a is _TypingEmpty else
+                                      a for a in args) if args else None
+                self.__next_in_mro__ = _next_in_mro(self)
+                if orig_bases is None:
+                    self.__orig_bases__ = initial_bases
+                elif origin is not None:
+                    self._abc_registry = origin._abc_registry
+                    self._abc_cache = origin._abc_cache
+                if hasattr(self, '_subs_tree'):
+                    self.__tree_hash__ = (hash(self._subs_tree()) if origin else
+                                          super(GenericMeta, self).__hash__())
+                return self
+
+        def __init__(cls, *args, **kwargs):
+            super().__init__(*args, **kwargs)
+            if not cls.__dict__.get('_is_protocol', None):
+                cls._is_protocol = any(b is Protocol or
+                                       isinstance(b, _ProtocolMeta) and
+                                       b.__origin__ is Protocol
+                                       for b in cls.__bases__)
+            if cls._is_protocol:
+                for base in cls.__mro__[1:]:
+                    if not (base in (object, Generic) or
+                            base.__module__ == 'collections.abc' and
+                            base.__name__ in _PROTO_WHITELIST or
+                            isinstance(base, TypingMeta) and base._is_protocol or
+                            isinstance(base, GenericMeta) and
+                            base.__origin__ is Generic):
+                        raise TypeError('Protocols can only inherit from other'
+                                        ' protocols, got %r' % base)
+
+                def _no_init(self, *args, **kwargs):
+                    if type(self)._is_protocol:
+                        raise TypeError('Protocols cannot be instantiated')
+                cls.__init__ = _no_init
+
+            def _proto_hook(other):
+                if not cls.__dict__.get('_is_protocol', None):
+                    return NotImplemented
+                if not isinstance(other, type):
+                    # Same error as for issubclass(1, int)
+                    raise TypeError('issubclass() arg 1 must be a class')
+                for attr in _get_protocol_attrs(cls):
+                    for base in other.__mro__:
+                        if attr in base.__dict__:
+                            if base.__dict__[attr] is None:
+                                return NotImplemented
+                            break
+                        annotations = getattr(base, '__annotations__', {})
+                        if (isinstance(annotations, typing.Mapping) and
+                                attr in annotations and
+                                isinstance(other, _ProtocolMeta) and
+                                other._is_protocol):
+                            break
+                    else:
+                        return NotImplemented
+                return True
+            if '__subclasshook__' not in cls.__dict__:
+                cls.__subclasshook__ = _proto_hook
+
+        def __instancecheck__(self, instance):
+            # We need this method for situations where attributes are
+            # assigned in __init__.
+            if ((not getattr(self, '_is_protocol', False) or
+                    _is_callable_members_only(self)) and
+                    issubclass(instance.__class__, self)):
+                return True
+            if self._is_protocol:
+                if all(hasattr(instance, attr) and
+                        (not callable(getattr(self, attr, None)) or
+                         getattr(instance, attr) is not None)
+                        for attr in _get_protocol_attrs(self)):
+                    return True
+            return super(GenericMeta, self).__instancecheck__(instance)
+
+        def __subclasscheck__(self, cls):
+            if self.__origin__ is not None:
+                if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
+                    raise TypeError("Parameterized generics cannot be used with class "
+                                    "or instance checks")
+                return False
+            if (self.__dict__.get('_is_protocol', None) and
+                    not self.__dict__.get('_is_runtime_protocol', None)):
+                if sys._getframe(1).f_globals['__name__'] in ['abc',
+                                                              'functools',
+                                                              'typing']:
+                    return False
+                raise TypeError("Instance and class checks can only be used with"
+                                " @runtime protocols")
+            if (self.__dict__.get('_is_runtime_protocol', None) and
+                    not _is_callable_members_only(self)):
+                if sys._getframe(1).f_globals['__name__'] in ['abc',
+                                                              'functools',
+                                                              'typing']:
+                    return super(GenericMeta, self).__subclasscheck__(cls)
+                raise TypeError("Protocols with non-method members"
+                                " don't support issubclass()")
+            return super(GenericMeta, self).__subclasscheck__(cls)
+
+        if not OLD_GENERICS:
+            @_tp_cache
+            def __getitem__(self, params):
+                # We also need to copy this from GenericMeta.__getitem__ to get
+                # special treatment of "Protocol". (Comments removed for brevity.)
+                if not isinstance(params, tuple):
+                    params = (params,)
+                if not params and _gorg(self) is not Tuple:
+                    raise TypeError(
+                        "Parameter list to %s[...] cannot be empty" % self.__qualname__)
+                msg = "Parameters to generic types must be types."
+                params = tuple(_type_check(p, msg) for p in params)
+                if self in (Generic, Protocol):
+                    if not all(isinstance(p, TypeVar) for p in params):
+                        raise TypeError(
+                            "Parameters to %r[...] must all be type variables" % self)
+                    if len(set(params)) != len(params):
+                        raise TypeError(
+                            "Parameters to %r[...] must all be unique" % self)
+                    tvars = params
+                    args = params
+                elif self in (Tuple, Callable):
+                    tvars = _type_vars(params)
+                    args = params
+                elif self.__origin__ in (Generic, Protocol):
+                    raise TypeError("Cannot subscript already-subscripted %s" %
+                                    repr(self))
+                else:
+                    _check_generic(self, params)
+                    tvars = _type_vars(params)
+                    args = params
+
+                prepend = (self,) if self.__origin__ is None else ()
+                return self.__class__(self.__name__,
+                                      prepend + self.__bases__,
+                                      _no_slots_copy(self.__dict__),
+                                      tvars=tvars,
+                                      args=args,
+                                      origin=self,
+                                      extra=self.__extra__,
+                                      orig_bases=self.__orig_bases__)
+
+    class Protocol(metaclass=_ProtocolMeta):
+        """Base class for protocol classes. Protocol classes are defined as::
+
+          class Proto(Protocol):
+              def meth(self) -> int:
+                  ...
+
+        Such classes are primarily used with static type checkers that recognize
+        structural subtyping (static duck-typing), for example::
+
+          class C:
+              def meth(self) -> int:
+                  return 0
+
+          def func(x: Proto) -> int:
+              return x.meth()
+
+          func(C())  # Passes static type check
+
+        See PEP 544 for details. Protocol classes decorated with
+        @typing_extensions.runtime act as simple-minded runtime protocols that
+        check only the presence of given attributes, ignoring their type
+        signatures.
+
+        Protocol classes can be generic; they are defined as::
+
+          class GenProto({bases}):
+              def meth(self) -> T:
+                  ...
+        """
+        __slots__ = ()
+        _is_protocol = True
+
+        def __new__(cls, *args, **kwds):
+            if _gorg(cls) is Protocol:
+                raise TypeError("Type Protocol cannot be instantiated; "
+                                "it can be used only as a base class")
+            if OLD_GENERICS:
+                return _generic_new(_next_in_mro(cls), cls, *args, **kwds)
+            return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+    if Protocol.__doc__ is not None:
+        Protocol.__doc__ = Protocol.__doc__.format(bases="Protocol, Generic[T]" if
+                                                   OLD_GENERICS else "Protocol[T]")
+
+
+elif PEP_560:
+    from typing import _type_check, _GenericAlias, _collect_type_vars  # noqa
+
+    class _ProtocolMeta(abc.ABCMeta):
+        # This metaclass is a bit unfortunate and exists only because of the lack
+        # of __instancehook__.
+        def __instancecheck__(cls, instance):
+            # We need this method for situations where attributes are
+            # assigned in __init__.
+            if ((not getattr(cls, '_is_protocol', False) or
+                    _is_callable_members_only(cls)) and
+                    issubclass(instance.__class__, cls)):
+                return True
+            if cls._is_protocol:
+                if all(hasattr(instance, attr) and
+                        (not callable(getattr(cls, attr, None)) or
+                         getattr(instance, attr) is not None)
+                        for attr in _get_protocol_attrs(cls)):
+                    return True
+            return super().__instancecheck__(instance)
+
+    class Protocol(metaclass=_ProtocolMeta):
+        # There is quite a lot of overlapping code with typing.Generic.
+        # Unfortunately it is hard to avoid this while these live in two different
+        # modules. The duplicated code will be removed when Protocol is moved to typing.
+        """Base class for protocol classes. Protocol classes are defined as::
+
+            class Proto(Protocol):
+                def meth(self) -> int:
+                    ...
+
+        Such classes are primarily used with static type checkers that recognize
+        structural subtyping (static duck-typing), for example::
+
+            class C:
+                def meth(self) -> int:
+                    return 0
+
+            def func(x: Proto) -> int:
+                return x.meth()
+
+            func(C())  # Passes static type check
+
+        See PEP 544 for details. Protocol classes decorated with
+        @typing_extensions.runtime act as simple-minded runtime protocols that
+        check only the presence of given attributes, ignoring their type
+        signatures.
+
+        Protocol classes can be generic; they are defined as::
+
+            class GenProto(Protocol[T]):
+                def meth(self) -> T:
+                    ...
+        """
+        __slots__ = ()
+        _is_protocol = True
+
+        def __new__(cls, *args, **kwds):
+            if cls is Protocol:
+                raise TypeError("Type Protocol cannot be instantiated; "
+                                "it can only be used as a base class")
+            return super().__new__(cls)
+
+        @_tp_cache
+        def __class_getitem__(cls, params):
+            if not isinstance(params, tuple):
+                params = (params,)
+            if not params and cls is not Tuple:
+                raise TypeError(
+                    "Parameter list to {}[...] cannot be empty".format(cls.__qualname__))
+            msg = "Parameters to generic types must be types."
+            params = tuple(_type_check(p, msg) for p in params)
+            if cls is Protocol:
+                # Generic can only be subscripted with unique type variables.
+                if not all(isinstance(p, TypeVar) for p in params):
+                    i = 0
+                    while isinstance(params[i], TypeVar):
+                        i += 1
+                    raise TypeError(
+                        "Parameters to Protocol[...] must all be type variables."
+                        " Parameter {} is {}".format(i + 1, params[i]))
+                if len(set(params)) != len(params):
+                    raise TypeError(
+                        "Parameters to Protocol[...] must all be unique")
+            else:
+                # Subscripting a regular Generic subclass.
+                _check_generic(cls, params)
+            return _GenericAlias(cls, params)
+
+        def __init_subclass__(cls, *args, **kwargs):
+            tvars = []
+            if '__orig_bases__' in cls.__dict__:
+                error = Generic in cls.__orig_bases__
+            else:
+                error = Generic in cls.__bases__
+            if error:
+                raise TypeError("Cannot inherit from plain Generic")
+            if '__orig_bases__' in cls.__dict__:
+                tvars = _collect_type_vars(cls.__orig_bases__)
+                # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
+                # If found, tvars must be a subset of it.
+                # If not found, tvars is it.
+                # Also check for and reject plain Generic,
+                # and reject multiple Generic[...] and/or Protocol[...].
+                gvars = None
+                for base in cls.__orig_bases__:
+                    if (isinstance(base, _GenericAlias) and
+                            base.__origin__ in (Generic, Protocol)):
+                        # for error messages
+                        the_base = 'Generic' if base.__origin__ is Generic else 'Protocol'
+                        if gvars is not None:
+                            raise TypeError(
+                                "Cannot inherit from Generic[...]"
+                                " and/or Protocol[...] multiple times.")
+                        gvars = base.__parameters__
+                if gvars is None:
+                    gvars = tvars
+                else:
+                    tvarset = set(tvars)
+                    gvarset = set(gvars)
+                    if not tvarset <= gvarset:
+                        s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
+                        s_args = ', '.join(str(g) for g in gvars)
+                        raise TypeError("Some type variables ({}) are"
+                                        " not listed in {}[{}]".format(s_vars,
+                                                                       the_base, s_args))
+                    tvars = gvars
+            cls.__parameters__ = tuple(tvars)
+
+            # Determine if this is a protocol or a concrete subclass.
+            if not cls.__dict__.get('_is_protocol', None):
+                cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+            # Set (or override) the protocol subclass hook.
+            def _proto_hook(other):
+                if not cls.__dict__.get('_is_protocol', None):
+                    return NotImplemented
+                if not getattr(cls, '_is_runtime_protocol', False):
+                    if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
+                        return NotImplemented
+                    raise TypeError("Instance and class checks can only be used with"
+                                    " @runtime protocols")
+                if not _is_callable_members_only(cls):
+                    if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
+                        return NotImplemented
+                    raise TypeError("Protocols with non-method members"
+                                    " don't support issubclass()")
+                if not isinstance(other, type):
+                    # Same error as for issubclass(1, int)
+                    raise TypeError('issubclass() arg 1 must be a class')
+                for attr in _get_protocol_attrs(cls):
+                    for base in other.__mro__:
+                        if attr in base.__dict__:
+                            if base.__dict__[attr] is None:
+                                return NotImplemented
+                            break
+                        annotations = getattr(base, '__annotations__', {})
+                        if (isinstance(annotations, typing.Mapping) and
+                                attr in annotations and
+                                isinstance(other, _ProtocolMeta) and
+                                other._is_protocol):
+                            break
+                    else:
+                        return NotImplemented
+                return True
+            if '__subclasshook__' not in cls.__dict__:
+                cls.__subclasshook__ = _proto_hook
+
+            # We have nothing more to do for non-protocols.
+            if not cls._is_protocol:
+                return
+
+            # Check consistency of bases.
+            for base in cls.__bases__:
+                if not (base in (object, Generic) or
+                        base.__module__ == 'collections.abc' and
+                        base.__name__ in _PROTO_WHITELIST or
+                        isinstance(base, _ProtocolMeta) and base._is_protocol):
+                    raise TypeError('Protocols can only inherit from other'
+                                    ' protocols, got %r' % base)
+
+            def _no_init(self, *args, **kwargs):
+                if type(self)._is_protocol:
+                    raise TypeError('Protocols cannot be instantiated')
+            cls.__init__ = _no_init
+
+
+if hasattr(typing, 'runtime_checkable'):
+    runtime_checkable = typing.runtime_checkable
+elif HAVE_PROTOCOLS:
+    def runtime_checkable(cls):
+        """Mark a protocol class as a runtime protocol, so that it
+        can be used with isinstance() and issubclass(). Raise TypeError
+        if applied to a non-protocol class.
+
+        This allows a simple-minded structural check very similar to the
+        one-offs in collections.abc such as Hashable.
+        """
+        if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
+            raise TypeError('@runtime_checkable can only be applied to protocol classes,'
+                            ' got %r' % cls)
+        cls._is_runtime_protocol = True
+        return cls
+
+
+if HAVE_PROTOCOLS:
+    # Exists for backwards compatibility.
+    runtime = runtime_checkable
+
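+# A minimal sketch of how a runtime-checkable protocol behaves (illustrative
+# only; `SupportsClose` and `Resource` are hypothetical names, and the module
+# is assumed to be importable as `typing_extensions`):
+#
+#     from typing_extensions import Protocol, runtime_checkable
+#
+#     @runtime_checkable
+#     class SupportsClose(Protocol):
+#         def close(self) -> None: ...
+#
+#     class Resource:
+#         def close(self) -> None:
+#             pass
+#
+#     assert isinstance(Resource(), SupportsClose)    # structural match
+#     assert not isinstance(object(), SupportsClose)  # missing close()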
+
+if hasattr(typing, 'SupportsIndex'):
+    SupportsIndex = typing.SupportsIndex
+elif HAVE_PROTOCOLS:
+    @runtime_checkable
+    class SupportsIndex(Protocol):
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __index__(self) -> int:
+            pass
+
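+# Illustrative sketch: int defines __index__ and therefore satisfies
+# SupportsIndex structurally, while str does not (assuming the module is
+# imported as `typing_extensions`):
+#
+#     from typing_extensions import SupportsIndex
+#
+#     assert isinstance(3, SupportsIndex)
+#     assert not isinstance("3", SupportsIndex)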
+
+if sys.version_info[:2] >= (3, 9):
+    # The standard library TypedDict in Python 3.8 does not store runtime information
+    # about which (if any) keys are optional.  See https://bugs.python.org/issue38834
+    TypedDict = typing.TypedDict
+else:
+    def _check_fails(cls, other):
+        try:
+            if sys._getframe(1).f_globals['__name__'] not in ['abc',
+                                                              'functools',
+                                                              'typing']:
+                # Typed dicts are only for static structural subtyping.
+                raise TypeError('TypedDict does not support instance and class checks')
+        except (AttributeError, ValueError):
+            pass
+        return False
+
+    def _dict_new(*args, **kwargs):
+        if not args:
+            raise TypeError('TypedDict.__new__(): not enough arguments')
+        _, args = args[0], args[1:]  # allow the "cls" keyword to be passed
+        return dict(*args, **kwargs)
+
+    _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)'
+
+    def _typeddict_new(*args, total=True, **kwargs):
+        if not args:
+            raise TypeError('TypedDict.__new__(): not enough arguments')
+        _, args = args[0], args[1:]  # allow the "cls" keyword to be passed
+        if args:
+            typename, args = args[0], args[1:]  # allow the "_typename" keyword to be passed
+        elif '_typename' in kwargs:
+            typename = kwargs.pop('_typename')
+            import warnings
+            warnings.warn("Passing '_typename' as keyword argument is deprecated",
+                          DeprecationWarning, stacklevel=2)
+        else:
+            raise TypeError("TypedDict.__new__() missing 1 required positional "
+                            "argument: '_typename'")
+        if args:
+            try:
+                fields, = args  # allow the "_fields" keyword to be passed
+            except ValueError:
+                raise TypeError('TypedDict.__new__() takes from 2 to 3 '
+                                'positional arguments but {} '
+                                'were given'.format(len(args) + 2))
+        elif '_fields' in kwargs and len(kwargs) == 1:
+            fields = kwargs.pop('_fields')
+            import warnings
+            warnings.warn("Passing '_fields' as keyword argument is deprecated",
+                          DeprecationWarning, stacklevel=2)
+        else:
+            fields = None
+
+        if fields is None:
+            fields = kwargs
+        elif kwargs:
+            raise TypeError("TypedDict takes either a dict or keyword arguments,"
+                            " but not both")
+
+        ns = {'__annotations__': dict(fields), '__total__': total}
+        try:
+            # Setting correct module is necessary to make typed dict classes pickleable.
+            ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
+        except (AttributeError, ValueError):
+            pass
+
+        return _TypedDictMeta(typename, (), ns)
+
+    _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,'
+                                         ' /, *, total=True, **kwargs)')
+
+    class _TypedDictMeta(type):
+        def __new__(cls, name, bases, ns, total=True):
+            # Create new typed dict class object.
+            # This method is called directly when TypedDict is subclassed,
+            # or via _typeddict_new when TypedDict is instantiated. This way
+            # TypedDict supports all three syntaxes described in its docstring.
+            # Subclasses and instances of TypedDict return actual dictionaries
+            # via _dict_new.
+            ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
+            tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
+
+            annotations = {}
+            own_annotations = ns.get('__annotations__', {})
+            own_annotation_keys = set(own_annotations.keys())
+            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+            own_annotations = {
+                n: typing._type_check(tp, msg) for n, tp in own_annotations.items()
+            }
+            required_keys = set()
+            optional_keys = set()
+
+            for base in bases:
+                annotations.update(base.__dict__.get('__annotations__', {}))
+                required_keys.update(base.__dict__.get('__required_keys__', ()))
+                optional_keys.update(base.__dict__.get('__optional_keys__', ()))
+
+            annotations.update(own_annotations)
+            if total:
+                required_keys.update(own_annotation_keys)
+            else:
+                optional_keys.update(own_annotation_keys)
+
+            tp_dict.__annotations__ = annotations
+            tp_dict.__required_keys__ = frozenset(required_keys)
+            tp_dict.__optional_keys__ = frozenset(optional_keys)
+            if not hasattr(tp_dict, '__total__'):
+                tp_dict.__total__ = total
+            return tp_dict
+
+        __instancecheck__ = __subclasscheck__ = _check_fails
+
+    TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
+    TypedDict.__module__ = __name__
+    TypedDict.__doc__ = \
+        """A simple typed name space. At runtime it is equivalent to a plain dict.
+
+        TypedDict creates a dictionary type that expects all of its
+        instances to have a certain set of keys, with each key
+        associated with a value of a consistent type. This expectation
+        is not checked at runtime but is only enforced by type checkers.
+        Usage::
+
+            class Point2D(TypedDict):
+                x: int
+                y: int
+                label: str
+
+            a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
+            b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
+
+            assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+        The type info can be accessed via the Point2D.__annotations__ dict, and
+        the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+        TypedDict supports two additional equivalent forms::
+
+            Point2D = TypedDict('Point2D', x=int, y=int, label=str)
+            Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+        The class syntax is only supported in Python 3.6+, while the two other
+        syntax forms work for Python 2.7 and 3.2+.
+        """
+
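+# A small usage sketch (illustrative; `Movie` is a hypothetical name) showing
+# how total=False marks every key in the class body optional for type
+# checkers, while the runtime object stays a plain dict:
+#
+#     class Movie(TypedDict, total=False):
+#         title: str
+#         year: int
+#
+#     m: Movie = {'title': 'Blade Runner'}  # 'year' may be omitted
+#     assert Movie.__optional_keys__ == frozenset({'title', 'year'})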
+
+# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints)
+if hasattr(typing, 'Annotated'):
+    Annotated = typing.Annotated
+    get_type_hints = typing.get_type_hints
+    # Not exported and not a public API, but needed for get_origin() and get_args()
+    # to work.
+    _AnnotatedAlias = typing._AnnotatedAlias
+elif PEP_560:
+    class _AnnotatedAlias(typing._GenericAlias, _root=True):
+        """Runtime representation of an annotated type.
+
+        At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+        with extra annotations. The alias behaves like a normal typing alias:
+        instantiating it is the same as instantiating the underlying type, and
+        binding it to type variables works the same way.
+        """
+        def __init__(self, origin, metadata):
+            if isinstance(origin, _AnnotatedAlias):
+                metadata = origin.__metadata__ + metadata
+                origin = origin.__origin__
+            super().__init__(origin, origin)
+            self.__metadata__ = metadata
+
+        def copy_with(self, params):
+            assert len(params) == 1
+            new_type = params[0]
+            return _AnnotatedAlias(new_type, self.__metadata__)
+
+        def __repr__(self):
+            return "typing_extensions.Annotated[{}, {}]".format(
+                typing._type_repr(self.__origin__),
+                ", ".join(repr(a) for a in self.__metadata__)
+            )
+
+        def __reduce__(self):
+            return operator.getitem, (
+                Annotated, (self.__origin__,) + self.__metadata__
+            )
+
+        def __eq__(self, other):
+            if not isinstance(other, _AnnotatedAlias):
+                return NotImplemented
+            if self.__origin__ != other.__origin__:
+                return False
+            return self.__metadata__ == other.__metadata__
+
+        def __hash__(self):
+            return hash((self.__origin__, self.__metadata__))
+
+    class Annotated:
+        """Add context specific metadata to a type.
+
+        Example: Annotated[int, runtime_check.Unsigned] indicates to the
+        hypothetical runtime_check module that this type is an unsigned int.
+        Every other consumer of this type can ignore this metadata and treat
+        this type as int.
+
+        The first argument to Annotated must be a valid type (and will be stored
+        in the __origin__ field); the remaining arguments are kept as a tuple in
+        the __metadata__ field.
+
+        Details:
+
+        - It's an error to call `Annotated` with fewer than two arguments.
+        - Nested Annotated are flattened::
+
+            Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+        - Instantiating an annotated type is equivalent to instantiating the
+        underlying type::
+
+            Annotated[C, Ann1](5) == C(5)
+
+        - Annotated can be used as a generic type alias::
+
+            Optimized = Annotated[T, runtime.Optimize()]
+            Optimized[int] == Annotated[int, runtime.Optimize()]
+
+            OptimizedList = Annotated[List[T], runtime.Optimize()]
+            OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+        """
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwargs):
+            raise TypeError("Type Annotated cannot be instantiated.")
+
+        @_tp_cache
+        def __class_getitem__(cls, params):
+            if not isinstance(params, tuple) or len(params) < 2:
+                raise TypeError("Annotated[...] should be used "
+                                "with at least two arguments (a type and an "
+                                "annotation).")
+            msg = "Annotated[t, ...]: t must be a type."
+            origin = typing._type_check(params[0], msg)
+            metadata = tuple(params[1:])
+            return _AnnotatedAlias(origin, metadata)
+
+        def __init_subclass__(cls, *args, **kwargs):
+            raise TypeError(
+                "Cannot subclass {}.Annotated".format(cls.__module__)
+            )
+
+    def _strip_annotations(t):
+        """Strips the annotations from a given type.
+        """
+        if isinstance(t, _AnnotatedAlias):
+            return _strip_annotations(t.__origin__)
+        if isinstance(t, typing._GenericAlias):
+            stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            res = t.copy_with(stripped_args)
+            res._special = t._special
+            return res
+        return t
+
+    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+        """Return type hints for an object.
+
+        This is often the same as obj.__annotations__, but it handles
+        forward references encoded as string literals, adds Optional[t] if a
+        default value equal to None is set, and recursively replaces all
+        'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
+
+        The argument may be a module, class, method, or function. The annotations
+        are returned as a dictionary. For classes, annotations also include
+        inherited members.
+
+        TypeError is raised if the argument is not of a type that can contain
+        annotations, and an empty dictionary is returned if no annotations are
+        present.
+
+        BEWARE -- the behavior of globalns and localns is counterintuitive
+        (unless you are familiar with how eval() and exec() work).  The
+        search order is locals first, then globals.
+
+        - If no dict arguments are passed, an attempt is made to use the
+          globals from obj (or the respective module's globals for classes),
+          and these are also used as the locals.  If the object does not appear
+          to have globals, an empty dictionary is used.
+
+        - If one dict argument is passed, it is used for both globals and
+          locals.
+
+        - If two dict arguments are passed, they specify globals and
+          locals, respectively.
+        """
+        hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
+        if include_extras:
+            return hint
+        return {k: _strip_annotations(t) for k, t in hint.items()}
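+
+    # Illustrative sketch of include_extras (with a hypothetical function f):
+    # the Annotated wrapper is stripped by default and preserved on request:
+    #
+    #     def f(x: Annotated[int, 'units: px']) -> None: ...
+    #
+    #     get_type_hints(f)
+    #     # {'x': <class 'int'>, 'return': <class 'NoneType'>}
+    #     get_type_hints(f, include_extras=True)
+    #     # {'x': Annotated[int, 'units: px'], 'return': <class 'NoneType'>}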
+
+elif HAVE_ANNOTATED:
+
+    def _is_dunder(name):
+        """Returns True if name is a __dunder_variable_name__."""
+        return len(name) > 4 and name.startswith('__') and name.endswith('__')
+
+    # Prior to Python 3.7 types did not have `copy_with`. A lot of the equality
+    # checks, argument expansion, etc. are done on the _subs_tree. As a result we
+    # can't provide a get_type_hints function that strips out annotations.
+
+    class AnnotatedMeta(typing.GenericMeta):
+        """Metaclass for Annotated"""
+
+        def __new__(cls, name, bases, namespace, **kwargs):
+            if any(b is not object for b in bases):
+                raise TypeError("Cannot subclass " + str(Annotated))
+            return super().__new__(cls, name, bases, namespace, **kwargs)
+
+        @property
+        def __metadata__(self):
+            return self._subs_tree()[2]
+
+        def _tree_repr(self, tree):
+            cls, origin, metadata = tree
+            if not isinstance(origin, tuple):
+                tp_repr = typing._type_repr(origin)
+            else:
+                tp_repr = origin[0]._tree_repr(origin)
+            metadata_reprs = ", ".join(repr(arg) for arg in metadata)
+            return '%s[%s, %s]' % (cls, tp_repr, metadata_reprs)
+
+        def _subs_tree(self, tvars=None, args=None):  # noqa
+            if self is Annotated:
+                return Annotated
+            res = super()._subs_tree(tvars=tvars, args=args)
+            # Flatten nested Annotated
+            if isinstance(res[1], tuple) and res[1][0] is Annotated:
+                sub_tp = res[1][1]
+                sub_annot = res[1][2]
+                return (Annotated, sub_tp, sub_annot + res[2])
+            return res
+
+        def _get_cons(self):
+            """Return the class used to create instance of this type."""
+            if self.__origin__ is None:
+                raise TypeError("Cannot get the underlying type of a "
+                                "non-specialized Annotated type.")
+            tree = self._subs_tree()
+            while isinstance(tree, tuple) and tree[0] is Annotated:
+                tree = tree[1]
+            if isinstance(tree, tuple):
+                return tree[0]
+            else:
+                return tree
+
+        @_tp_cache
+        def __getitem__(self, params):
+            if not isinstance(params, tuple):
+                params = (params,)
+            if self.__origin__ is not None:  # specializing an instantiated type
+                return super().__getitem__(params)
+            elif not isinstance(params, tuple) or len(params) < 2:
+                raise TypeError("Annotated[...] should be instantiated "
+                                "with at least two arguments (a type and an "
+                                "annotation).")
+            else:
+                msg = "Annotated[t, ...]: t must be a type."
+                tp = typing._type_check(params[0], msg)
+                metadata = tuple(params[1:])
+            return self.__class__(
+                self.__name__,
+                self.__bases__,
+                _no_slots_copy(self.__dict__),
+                tvars=_type_vars((tp,)),
+                # Metadata is a tuple so it won't be touched by _replace_args et al.
+                args=(tp, metadata),
+                origin=self,
+            )
+
+        def __call__(self, *args, **kwargs):
+            cons = self._get_cons()
+            result = cons(*args, **kwargs)
+            try:
+                result.__orig_class__ = self
+            except AttributeError:
+                pass
+            return result
+
+        def __getattr__(self, attr):
+            # For simplicity we just don't relay all dunder names
+            if self.__origin__ is not None and not _is_dunder(attr):
+                return getattr(self._get_cons(), attr)
+            raise AttributeError(attr)
+
+        def __setattr__(self, attr, value):
+            if _is_dunder(attr) or attr.startswith('_abc_'):
+                super().__setattr__(attr, value)
+            elif self.__origin__ is None:
+                raise AttributeError(attr)
+            else:
+                setattr(self._get_cons(), attr, value)
+
+        def __instancecheck__(self, obj):
+            raise TypeError("Annotated cannot be used with isinstance().")
+
+        def __subclasscheck__(self, cls):
+            raise TypeError("Annotated cannot be used with issubclass().")
+
+    class Annotated(metaclass=AnnotatedMeta):
+        """Add context specific metadata to a type.
+
+        Example: Annotated[int, runtime_check.Unsigned] indicates to the
+        hypothetical runtime_check module that this type is an unsigned int.
+        Every other consumer of this type can ignore this metadata and treat
+        this type as int.
+
+        The first argument to Annotated must be a valid type, the remaining
+        arguments are kept as a tuple in the __metadata__ field.
+
+        Details:
+
+        - It's an error to call `Annotated` with fewer than two arguments.
+        - Nested Annotated are flattened::
+
+            Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+        - Instantiating an annotated type is equivalent to instantiating the
+        underlying type::
+
+            Annotated[C, Ann1](5) == C(5)
+
+        - Annotated can be used as a generic type alias::
+
+            Optimized = Annotated[T, runtime.Optimize()]
+            Optimized[int] == Annotated[int, runtime.Optimize()]
+
+            OptimizedList = Annotated[List[T], runtime.Optimize()]
+            OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+        """
+
+# Python 3.8 has get_origin() and get_args() but those implementations aren't
+# Annotated-aware, so we can't use those; only the Python 3.9 versions will do.
+if sys.version_info[:2] >= (3, 9):
+    get_origin = typing.get_origin
+    get_args = typing.get_args
+elif PEP_560:
+    from typing import _GenericAlias  # noqa
+
+    def get_origin(tp):
+        """Get the unsubscripted version of a type.
+
+        This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+        and Annotated. Return None for unsupported types. Examples::
+
+            get_origin(Literal[42]) is Literal
+            get_origin(int) is None
+            get_origin(ClassVar[int]) is ClassVar
+            get_origin(Generic) is Generic
+            get_origin(Generic[T]) is Generic
+            get_origin(Union[T, int]) is Union
+            get_origin(List[Tuple[T, T]][int]) == list
+        """
+        if isinstance(tp, _AnnotatedAlias):
+            return Annotated
+        if isinstance(tp, _GenericAlias):
+            return tp.__origin__
+        if tp is Generic:
+            return Generic
+        return None
+
+    def get_args(tp):
+        """Get type arguments with all substitutions performed.
+
+        For unions, basic simplifications used by Union constructor are performed.
+        Examples::
+            get_args(Dict[str, int]) == (str, int)
+            get_args(int) == ()
+            get_args(Union[int, Union[T, int], str][int]) == (int, str)
+            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+            get_args(Callable[[], T][int]) == ([], int)
+        """
+        if isinstance(tp, _AnnotatedAlias):
+            return (tp.__origin__,) + tp.__metadata__
+        if isinstance(tp, _GenericAlias):
+            res = tp.__args__
+            if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
+                res = (list(res[:-1]), res[-1])
+            return res
+        return ()
+
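+# A short sketch of the Annotated-aware behavior provided here (illustrative):
+#
+#     get_origin(Annotated[int, 'meta'])   # -> Annotated
+#     get_args(Annotated[int, 'meta'])     # -> (int, 'meta')
+#     get_origin(List[int])                # -> list
+#     get_args(Callable[[int], str])       # -> ([int], str)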
+
+if hasattr(typing, 'TypeAlias'):
+    TypeAlias = typing.TypeAlias
+elif sys.version_info[:2] >= (3, 9):
+    class _TypeAliasForm(typing._SpecialForm, _root=True):
+        def __repr__(self):
+            return 'typing_extensions.' + self._name
+
+    @_TypeAliasForm
+    def TypeAlias(self, parameters):
+        """Special marker indicating that an assignment should
+        be recognized as a proper type alias definition by type
+        checkers.
+
+        For example::
+
+            Predicate: TypeAlias = Callable[..., bool]
+
+        It's invalid when used anywhere except as in the example above.
+        """
+        raise TypeError("{} is not subscriptable".format(self))
+
+elif sys.version_info[:2] >= (3, 7):
+    class _TypeAliasForm(typing._SpecialForm, _root=True):
+        def __repr__(self):
+            return 'typing_extensions.' + self._name
+
+    TypeAlias = _TypeAliasForm('TypeAlias',
+                               doc="""Special marker indicating that an assignment should
+                               be recognized as a proper type alias definition by type
+                               checkers.
+
+                               For example::
+
+                                   Predicate: TypeAlias = Callable[..., bool]
+
+                               It's invalid when used anywhere except as in the example
+                               above.""")
+
+elif hasattr(typing, '_FinalTypingBase'):
+    class _TypeAliasMeta(typing.TypingMeta):
+        """Metaclass for TypeAlias"""
+
+        def __repr__(self):
+            return 'typing_extensions.TypeAlias'
+
+    class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True):
+        """Special marker indicating that an assignment should
+        be recognized as a proper type alias definition by type
+        checkers.
+
+        For example::
+
+            Predicate: TypeAlias = Callable[..., bool]
+
+        It's invalid when used anywhere except as in the example above.
+        """
+        __slots__ = ()
+
+        def __instancecheck__(self, obj):
+            raise TypeError("TypeAlias cannot be used with isinstance().")
+
+        def __subclasscheck__(self, cls):
+            raise TypeError("TypeAlias cannot be used with issubclass().")
+
+        def __repr__(self):
+            return 'typing_extensions.TypeAlias'
+
+    TypeAlias = _TypeAliasBase(_root=True)
+else:
+    class _TypeAliasMeta(typing.TypingMeta):
+        """Metaclass for TypeAlias"""
+
+        def __instancecheck__(self, obj):
+            raise TypeError("TypeAlias cannot be used with isinstance().")
+
+        def __subclasscheck__(self, cls):
+            raise TypeError("TypeAlias cannot be used with issubclass().")
+
+        def __call__(self, *args, **kwargs):
+            raise TypeError("Cannot instantiate TypeAlias")
+
+    class TypeAlias(metaclass=_TypeAliasMeta, _root=True):
+        """Special marker indicating that an assignment should
+        be recognized as a proper type alias definition by type
+        checkers.
+
+        For example::
+
+            Predicate: TypeAlias = Callable[..., bool]
+
+        It's invalid when used anywhere except as in the example above.
+        """
+        __slots__ = ()
diff --git a/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/DESCRIPTION.rst b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/DESCRIPTION.rst
new file mode 100644
index 00000000..5b2a1788
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/DESCRIPTION.rst
@@ -0,0 +1,1323 @@
+
+urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
+Python ecosystem already uses urllib3 and you should too.
+urllib3 brings many critical features that are missing from the Python
+standard libraries:
+
+- Thread safety.
+- Connection pooling.
+- Client-side SSL/TLS verification.
+- File uploads with multipart encoding.
+- Helpers for retrying requests and dealing with HTTP redirects.
+- Support for gzip, deflate, and brotli encoding.
+- Proxy support for HTTP and SOCKS.
+- 100% test coverage.
+
+urllib3 is powerful and easy to use:
+
+.. code-block:: python
+
+    >>> import urllib3
+    >>> http = urllib3.PoolManager()
+    >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
+    >>> r.status
+    200
+    >>> r.data
+    'User-agent: *\nDisallow: /deny\n'
+
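+Retries and timeouts can be tuned per request; a minimal sketch using the
+``urllib3.Retry`` and ``urllib3.Timeout`` helpers:
+
+.. code-block:: python
+
+    >>> import urllib3
+    >>> http = urllib3.PoolManager()
+    >>> r = http.request(
+    ...     'GET', 'http://httpbin.org/robots.txt',
+    ...     timeout=urllib3.Timeout(connect=1.0, read=2.0),
+    ...     retries=urllib3.Retry(total=3, redirect=2))
+    >>> r.status
+    200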
+
+Installing
+----------
+
+urllib3 can be installed with `pip <https://pip.pypa.io>`_::
+
+    $ python -m pip install urllib3
+
+Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
+
+    $ git clone git://github.com/urllib3/urllib3.git
+    $ python setup.py install
+
+
+Documentation
+-------------
+
+urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
+
+
+Contributing
+------------
+
+urllib3 happily accepts contributions. Please see our
+`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
+for some tips on getting started.
+
+
+Security Disclosures
+--------------------
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure with maintainers.
+
+
+Maintainers
+-----------
+
+- `@sethmlarson <https://github.com/sethmlarson>`__ (Seth M. Larson)
+- `@pquentin <https://github.com/pquentin>`__ (Quentin Pradet)
+- `@theacodes <https://github.com/theacodes>`__ (Thea Flowers)
+- `@haikuginger <https://github.com/haikuginger>`__ (Jess Shapiro)
+- `@lukasa <https://github.com/lukasa>`__ (Cory Benfield)
+- `@sigmavirus24 <https://github.com/sigmavirus24>`__ (Ian Stapleton Cordasco)
+- `@shazow <https://github.com/shazow>`__ (Andrey Petrov)
+
+👋
+
+
+Sponsorship
+-----------
+
+If your company benefits from this library, please consider `sponsoring its
+development <https://urllib3.readthedocs.io/en/latest/sponsors.html>`_.
+
+
+For Enterprise
+--------------
+
+.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
+   :width: 75
+   :alt: Tidelift
+
+.. list-table::
+   :widths: 10 100
+
+   * - |tideliftlogo|
+     - Professional support for urllib3 is available as part of the `Tidelift
+       Subscription`_.  Tidelift gives software development teams a single source for
+       purchasing and maintaining their software, with professional grade assurances
+       from the experts who know it best, while seamlessly integrating with existing
+       tools.
+
+.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
+
+
+Changes
+=======
+
+1.26.4 (2021-03-15)
+-------------------
+
+* Changed behavior of the default ``SSLContext`` when connecting to HTTPS proxy
+  during HTTPS requests. The default ``SSLContext`` now sets ``check_hostname=True``.
+
+
+1.26.3 (2021-01-26)
+-------------------
+
+* Fixed bytes and string comparison issue with headers (Pull #2141)
+
+* Changed ``ProxySchemeUnknown`` error message to be
+  more actionable if the user supplies a proxy URL without
+  a scheme. (Pull #2107)
+
+
+1.26.2 (2020-11-12)
+-------------------
+
+* Fixed an issue where ``wrap_socket`` and ``CERT_REQUIRED`` wouldn't
+  be imported properly on Python 2.7.8 and earlier (Pull #2052)
+
+
+1.26.1 (2020-11-11)
+-------------------
+
+* Fixed an issue where two ``User-Agent`` headers would be sent if a
+  ``User-Agent`` header key is passed as ``bytes`` (Pull #2047)
+
+
+1.26.0 (2020-11-10)
+-------------------
+
+* **NOTE: urllib3 v2.0 will drop support for Python 2**.
+  `Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>`_.
+
+* Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
+
+* Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
+  still wish to use TLS earlier than 1.2 without a deprecation warning
+  should opt-in explicitly by setting ``ssl_version=ssl.PROTOCOL_TLSv1_1`` (Pull #2002)
+  **Starting in urllib3 v2.0: Connections that receive a ``DeprecationWarning`` will fail**
+
+* Deprecated ``Retry`` options ``Retry.DEFAULT_METHOD_WHITELIST``, ``Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST``
+  and ``Retry(method_whitelist=...)`` in favor of ``Retry.DEFAULT_ALLOWED_METHODS``,
+  ``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT``, and ``Retry(allowed_methods=...)``
+  (Pull #2000) **Starting in urllib3 v2.0: Deprecated options will be removed**
+
+* Added default ``User-Agent`` header to every request (Pull #1750)
+
+* Added ``urllib3.util.SKIP_HEADER`` for skipping ``User-Agent``, ``Accept-Encoding``, 
+  and ``Host`` headers from being automatically emitted with requests (Pull #2018)
+
+* Collapse ``transfer-encoding: chunked`` request data and framing into
+  the same ``socket.send()`` call (Pull #1906)
+
+* Send ``http/1.1`` ALPN identifier with every TLS handshake by default (Pull #1894)
+
+* Properly terminate SecureTransport connections when CA verification fails (Pull #1977)
+
+* Don't emit an ``SNIMissingWarning`` when passing ``server_hostname=None``
+  to SecureTransport (Pull #1903)
+
+* Disabled requesting TLSv1.2 session tickets as they weren't being used by urllib3 (Pull #1970)
+
+* Suppress ``BrokenPipeError`` when writing request body after the server
+  has closed the socket (Pull #1524)
+
+* Wrap ``ssl.SSLError`` that can be raised from reading a socket (e.g. "bad MAC")
+  into an ``urllib3.exceptions.SSLError`` (Pull #1939)
+
+
+1.25.11 (2020-10-19)
+--------------------
+
+* Fix retry backoff time parsed from ``Retry-After`` header when given
+  in the HTTP date format. The HTTP date was parsed as the local timezone
+  rather than accounting for the timezone in the HTTP date (typically
+  UTC) (Pull #1932, Pull #1935, Pull #1938, Pull #1949)
+
+* Fix issue where an error would be raised when the ``SSLKEYLOGFILE``
+  environment variable was set to the empty string. Now ``SSLContext.keylog_file``
+  is not set in this situation (Pull #2016)
+
+
+1.25.10 (2020-07-22)
+--------------------
+
+* Added support for ``SSLKEYLOGFILE`` environment variable for
+  logging TLS session keys for use with programs like
+  Wireshark for decrypting captured web traffic (Pull #1867)
+
+* Fixed loading of SecureTransport libraries on macOS Big Sur
+  due to the new dynamic linker cache (Pull #1905)
+
+* Collapse chunked request bodies data and framing into one
+  call to ``send()`` to reduce the number of TCP packets by 2-4x (Pull #1906)
+
+* Don't insert ``None`` into ``ConnectionPool`` if the pool
+  was empty when requesting a connection (Pull #1866)
+
+* Avoid ``hasattr`` call in ``BrotliDecoder.decompress()`` (Pull #1858)
+
+
+1.25.9 (2020-04-16)
+-------------------
+
+* Added ``InvalidProxyConfigurationWarning`` which is raised when
+  erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
+  support connecting to HTTPS proxies but will soon be able to
+  and we would like users to migrate properly without much breakage.
+
+  See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
+  for more information on how to fix your proxy config. (Pull #1851)
+
+* Drain connection after ``PoolManager`` redirect (Pull #1817)
+
+* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
+
+* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
+
+* Allow the CA certificate data to be passed as a string (Pull #1804)
+
+* Raise ``ValueError`` if method contains control characters (Pull #1800)
+
+* Add ``__repr__`` to ``Timeout`` (Pull #1795)
+
+
+1.25.8 (2020-01-20)
+-------------------
+
+* Drop support for EOL Python 3.4 (Pull #1774)
+
+* Optimize _encode_invalid_chars (Pull #1787)
+
+
+1.25.7 (2019-11-11)
+-------------------
+
+* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
+
+* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
+
+* Fix issue where URL fragment was sent within the request target. (Pull #1732)
+
+* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
+
+* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
+
+
+1.25.6 (2019-09-24)
+-------------------
+
+* Fix issue where tilde (``~``) characters were incorrectly
+  percent-encoded in the path. (Pull #1692)
+
+
+1.25.5 (2019-09-19)
+-------------------
+
+* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
+  caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
+  (Issue #1682)
+
+
+1.25.4 (2019-09-19)
+-------------------
+
+* Propagate Retry-After header settings to subsequent retries. (Pull #1607)
+
+* Fix edge case where Retry-After header was still respected even when
+  explicitly opted out of. (Pull #1607)
+
+* Remove dependency on ``rfc3986`` for URL parsing.
+
+* Fix issue where URLs containing invalid characters within ``Url.auth`` would
+  raise an exception instead of percent-encoding those characters.
+
+* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
+  work well with BufferedReaders and other ``io`` module features. (Pull #1652)
+
+* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
+
+
+1.25.3 (2019-05-23)
+-------------------
+
+* Change ``HTTPSConnection`` to load system CA certificates
+  when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
+  unspecified. (Pull #1608, Issue #1603)
+
+* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
+
+
+1.25.2 (2019-04-28)
+-------------------
+
+* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
+
+* Change ``parse_url`` to percent-encode invalid characters within the
+  path, query, and target components. (Pull #1586)
+
+
+1.25.1 (2019-04-24)
+-------------------
+
+* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
+
+* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
+
+
+1.25 (2019-04-22)
+-----------------
+
+* Require and validate certificates by default when using HTTPS (Pull #1507)
+
+* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
+
+* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
+  encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)
+
+* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
+  implementations. (Pull #1496)
+
+* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, Pull #1492)
+
+* Fixed issue where OpenSSL would block if an encrypted client private key was
+  given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
+
+* Added support for Brotli content encoding. It is enabled automatically if
+  ``brotlipy`` package is installed which can be requested with
+  ``urllib3[brotli]`` extra. (Pull #1532)
+
+* Drop ciphers using DSS key exchange from default TLS cipher suites.
+  Improve default ciphers when using SecureTransport. (Pull #1496)
+
+* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
+
+1.24.3 (2019-05-01)
+-------------------
+
+* Apply fix for CVE-2019-9740. (Pull #1591)
+
+1.24.2 (2019-04-17)
+-------------------
+
+* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
+  ``ssl_context`` parameters are specified.
+
+* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
+
+* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
+
+
+1.24.1 (2018-11-02)
+-------------------
+
+* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
+
+* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
+
+
+1.24 (2018-10-16)
+-----------------
+
+* Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449)
+
+* Test against Python 3.7 on AppVeyor. (Pull #1453)
+
+* Early-out ipv6 checks when running on App Engine. (Pull #1450)
+
+* Change ambiguous description of backoff_factor (Pull #1436)
+
+* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
+
+* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
+
+* Add a server_hostname parameter to HTTPSConnection which allows for
+  overriding the SNI hostname sent in the handshake. (Pull #1397)
+
+* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
+
+* Fixed bug where responses with header Content-Type: message/* erroneously
+  raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
+
+* Move urllib3 to src/urllib3 (Pull #1409)
+
+
+1.23 (2018-06-04)
+-----------------
+
+* Allow providing a list of headers to strip from requests when redirecting
+  to a different host. Defaults to the ``Authorization`` header. Different
+  headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
+
+* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
+
+* Dropped Python 3.3 support. (Pull #1242)
+
+* Put the connection back in the pool when calling stream() or read_chunked() on
+  a chunked HEAD response. (Issue #1234)
+
+* Fixed pyOpenSSL-specific ssl client authentication issue when clients
+  attempted to auth via certificate + chain (Issue #1060)
+
+* Add the port to the connectionpool connect print (Pull #1251)
+
+* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
+
+* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
+
+* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
+
+* Added support for auth info in url for SOCKS proxy (Pull #1363)
+
+
+1.22 (2017-07-20)
+-----------------
+
+* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via
+  IPv6 proxy. (Issue #1222)
+
+* Made the connection pool retry on ``SSLError``.  The original ``SSLError``
+  is available on ``MaxRetryError.reason``. (Issue #1112)
+
+* Drain and release connection before recursing on retry/redirect.  Fixes
+  deadlocks with a blocking connectionpool. (Issue #1167)
+
+* Fixed compatibility for cookiejar. (Issue #1229)
+
+* pyopenssl: Use vendored version of ``six``. (Issue #1231)
+
+
+1.21.1 (2017-05-02)
+-------------------
+
+* Fixed SecureTransport issue that would cause long delays in response body
+  delivery. (Pull #1154)
+
+* Fixed regression in 1.21 that threw exceptions when users passed the
+  ``socket_options`` flag to the ``PoolManager``.  (Issue #1165)
+
+* Fixed regression in 1.21 that threw exceptions when users passed the
+  ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.
+  (Pull #1157)
+
+
+1.21 (2017-04-25)
+-----------------
+
+* Improved performance of certain selector system calls on Python 3.5 and
+  later. (Pull #1095)
+
+* Resolved issue where the PyOpenSSL backend would not wrap SysCallError
+  exceptions appropriately when sending data. (Pull #1125)
+
+* Selectors now detects a monkey-patched select module after import for modules
+  that patch the select module like eventlet, greenlet. (Pull #1128)
+
+* Reduced memory consumption when streaming zlib-compressed responses
+  (as opposed to raw deflate streams). (Pull #1129)
+
+* Connection pools now use the entire request context when constructing the
+  pool key. (Pull #1016)
+
+* ``PoolManager.connection_from_*`` methods now accept a new keyword argument,
+  ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``.
+  (Pull #1016)
+
+* Add retry counter for ``status_forcelist``. (Issue #1147)
+
+* Added ``contrib`` module for using SecureTransport on macOS:
+  ``urllib3.contrib.securetransport``.  (Pull #1122)
+
+* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:
+  for schemes it does not recognise, it assumes they are case-sensitive and
+  leaves them unchanged.
+  (Issue #1080)
+
+
+1.20 (2017-01-19)
+-----------------
+
+* Added support for waiting for I/O using selectors other than select,
+  improving urllib3's behaviour with large numbers of concurrent connections.
+  (Pull #1001)
+
+* Updated the date for the system clock check. (Issue #1005)
+
+* ConnectionPools now correctly consider hostnames to be case-insensitive.
+  (Issue #1032)
+
+* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module
+  to fail when it is injected, rather than at first use. (Pull #1063)
+
+* Outdated versions of cryptography now cause the PyOpenSSL contrib module
+  to fail when it is injected, rather than at first use. (Issue #1044)
+
+* Automatically attempt to rewind a file-like body object when a request is
+  retried or redirected. (Pull #1039)
+
+* Fix some bugs that occur when modules incautiously patch the queue module.
+  (Pull #1061)
+
+* Prevent retries from occurring on read timeouts for which the request method
+  was not in the method whitelist. (Issue #1059)
+
+* Changed the PyOpenSSL contrib module to lazily load idna to avoid
+  unnecessarily bloating the memory of programs that don't need it. (Pull
+  #1076)
+
+* Add support for IPv6 literals with zone identifiers. (Pull #1013)
+
+* Added support for socks5h:// and socks4a:// schemes when working with SOCKS
+  proxies, and controlled remote DNS appropriately. (Issue #1035)
+
+
+1.19.1 (2016-11-16)
+-------------------
+
+* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)
+
+
+1.19 (2016-11-03)
+-----------------
+
+* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when
+  using the default retry logic. (Pull #955)
+
+* Remove markers from setup.py to assist ancient setuptools versions. (Issue
+  #986)
+
+* Disallow superscripts and other integerish things in URL ports. (Issue #989)
+
+* Allow urllib3's HTTPResponse.stream() method to continue to work with
+  non-httplib underlying FPs. (Pull #990)
+
+* Empty filenames in multipart headers are now emitted as such, rather than
+  being suppressed. (Issue #1015)
+
+* Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
+
+
+1.18.1 (2016-10-27)
+-------------------
+
+* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with
+  PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This
+  release fixes a vulnerability whereby urllib3 in the above configuration
+  would silently fail to validate TLS certificates due to erroneously setting
+  invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous
+  flags do not cause a problem in OpenSSL versions before 1.1.0, which
+  interprets the presence of any flag as requesting certificate validation.
+
+  There is no PR for this patch, as it was prepared for simultaneous disclosure
+  and release. The master branch received the same fix in Pull #1010.
+
+
+1.18 (2016-09-26)
+-----------------
+
+* Fixed incorrect message for IncompleteRead exception. (Pull #973)
+
+* Accept ``iPAddress`` subject alternative name fields in TLS certificates.
+  (Issue #258)
+
+* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.
+  (Issue #977)
+
+* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)
+
+
+1.17 (2016-09-06)
+-----------------
+
+* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)
+
+* ConnectionPool debug log now includes scheme, host, and port. (Issue #897)
+
+* Substantially refactored documentation. (Issue #887)
+
+* Used URLFetch default timeout on AppEngine, rather than hardcoding our own.
+  (Issue #858)
+
+* Normalize the scheme and host in the URL parser (Issue #833)
+
+* ``HTTPResponse`` contains the last ``Retry`` object, which now also
+  contains retries history. (Issue #848)
+
+* Timeout can no longer be set as boolean, and must be greater than zero.
+  (Pull #924)
+
+* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
+  now use cryptography and idna, both of which are already dependencies of
+  PyOpenSSL. (Pull #930)
+
+* Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
+
+* Try to use the operating system's certificates when we are using an
+  ``SSLContext``. (Pull #941)
+
+* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
+  ChaCha20, but ChaCha20 is then preferred to everything else. (Pull #947)
+
+* Updated cipher suite list to remove 3DES-based cipher suites. (Pull #958)
+
+* Removed the cipher suite fallback to allow HIGH ciphers. (Pull #958)
+
+* Implemented ``length_remaining`` to determine remaining content
+  to be read. (Pull #949)
+
+* Implemented ``enforce_content_length`` to enable exceptions when
+  incomplete data chunks are received. (Pull #949)
+
+* Dropped connection start, dropped connection reset, redirect, forced retry,
+  and new HTTPS connection log levels to DEBUG, from INFO. (Pull #967)
+
+
+1.16 (2016-06-11)
+-----------------
+
+* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)
+
+* Provide ``key_fn_by_scheme`` pool keying mechanism that can be
+  overridden. (Issue #830)
+
+* Normalize scheme and host to lowercase for pool keys, and include
+  ``source_address``. (Issue #830)
+
+* Cleaner exception chain in Python 3 for ``_make_request``.
+  (Issue #861)
+
+* Fixed installing ``urllib3[socks]`` extra. (Issue #864)
+
+* Fixed signature of ``ConnectionPool.close`` so it can actually safely be
+  called by subclasses. (Issue #873)
+
+* Retain ``release_conn`` state across retries. (Issues #651, #866)
+
+* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to
+  ``HTTPResponse`` but can be replaced with a subclass. (Issue #879)
+
+
+1.15.1 (2016-04-11)
+-------------------
+
+* Fix packaging to include backports module. (Issue #841)
+
+
+1.15 (2016-04-06)
+-----------------
+
+* Added Retry(raise_on_status=False). (Issue #720)
+
+* Always use setuptools, no more distutils fallback. (Issue #785)
+
+* Dropped support for Python 3.2. (Issue #786)
+
+* Chunked transfer encoding when requesting with ``chunked=True``.
+  (Issue #790)
+
+* Fixed regression with IPv6 port parsing. (Issue #801)
+
+* Append SNIMissingWarning messages to allow users to specify it in
+  the PYTHONWARNINGS environment variable. (Issue #816)
+
+* Handle unicode headers in Py2. (Issue #818)
+
+* Log certificate when there is a hostname mismatch. (Issue #820)
+
+* Preserve order of request/response headers. (Issue #821)
+
+
+1.14 (2015-12-29)
+-----------------
+
+* contrib: SOCKS proxy support! See the sketch after this list. (Issue #762)
+
+* Fixed AppEngine handling of transfer-encoding header and bug
+  in Timeout defaults checking. (Issue #763)
+
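+A minimal sketch of the SOCKS support (the proxy address is illustrative, and
+PySocks must be installed, e.g. via ``pip install urllib3[socks]``):
+
+.. code-block:: python
+
+    from urllib3.contrib.socks import SOCKSProxyManager
+
+    proxy = SOCKSProxyManager("socks5://localhost:1080/")
+    r = proxy.request("GET", "http://example.com/")
+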
+
+1.13.1 (2015-12-18)
+-------------------
+
+* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
+
+
+1.13 (2015-12-14)
+-----------------
+
+* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
+
+* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
+
+* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
+
+* Close connections more defensively on exception. (Issue #734)
+
+* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
+  repeatedly flushing the decoder, to function better on Jython. (Issue #743)
+
+* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
+
+
+1.12 (2015-09-03)
+-----------------
+
+* Rely on ``six`` for importing ``httplib`` to work around
+  conflicts with other Python 3 shims. (Issue #688)
+
+* Add support for directories of certificate authorities, as supported by
+  OpenSSL. (Issue #701)
+
+* New exception: ``NewConnectionError``, raised when we fail to establish
+  a new connection, usually an ``ECONNREFUSED`` socket error.
+
+
+1.11 (2015-07-21)
+-----------------
+
+* When ``ca_certs`` is given, ``cert_reqs`` defaults to
+  ``'CERT_REQUIRED'``. (Issue #650)
+
+* ``pip install urllib3[secure]`` will install Certifi and
+  PyOpenSSL as dependencies. (Issue #678)
+
+* Made ``HTTPHeaderDict`` usable as a ``headers`` input value.
+  (Issues #632, #679)
+
+* Added `urllib3.contrib.appengine <https://urllib3.readthedocs.io/en/latest/contrib.html#google-app-engine>`_
+  which has an ``AppEngineManager`` for using ``URLFetch`` in a
+  Google AppEngine environment. (Issue #664)
+
+* Dev: Added test suite for AppEngine. (Issue #631)
+
+* Fix performance regression when using PyOpenSSL. (Issue #626)
+
+* Passing incorrect scheme (e.g. ``foo://``) will raise
+  ``ValueError`` instead of ``AssertionError`` (backwards
+  compatible for now, but please migrate). (Issue #640)
+
+* Fix pools not getting replenished when an error occurs during a
+  request using ``release_conn=False``. (Issue #644)
+
+* Fix pool-default headers not applying for url-encoded requests
+  like GET. (Issue #657)
+
+* log.warning in Python 3 when headers are skipped due to parsing
+  errors. (Issue #642)
+
+* Close and discard connections if an error occurs during read.
+  (Issue #660)
+
+* Fix host parsing for IPv6 proxies. (Issue #668)
+
+* Separate warning type SubjectAltNameWarning, now issued once
+  per host. (Issue #671)
+
+* Fix ``httplib.IncompleteRead`` not getting converted to
+  ``ProtocolError`` when using ``HTTPResponse.stream()``
+  (Issue #674)
+
+
+1.10.4 (2015-05-03)
+-------------------
+
+* Migrate tests to Tornado 4. (Issue #594)
+
+* Append default warning configuration rather than overwrite.
+  (Issue #603)
+
+* Fix streaming decoding regression. (Issue #595)
+
+* Fix chunked requests losing state across keep-alive connections.
+  (Issue #599)
+
+* Fix hanging when chunked HEAD response has no body. (Issue #605)
+
+
+1.10.3 (2015-04-21)
+-------------------
+
+* Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
+  (Issue #558)
+
+* Fix regression of duplicate header keys being discarded.
+  (Issue #563)
+
+* ``Response.stream()`` returns a generator for chunked responses.
+  (Issue #560)
+
+* Set upper-bound timeout when waiting for a socket in PyOpenSSL.
+  (Issue #585)
+
+* Work on platforms without ``ssl`` module for plain HTTP requests.
+  (Issue #587)
+
+* Stop relying on the stdlib's default cipher list. (Issue #588)
+
+
+1.10.2 (2015-02-25)
+-------------------
+
+* Fix file descriptor leakage on retries. (Issue #548)
+
+* Removed RC4 from default cipher list. (Issue #551)
+
+* Header performance improvements. (Issue #544)
+
+* Fix PoolManager not obeying redirect retry settings. (Issue #553)
+
+
+1.10.1 (2015-02-10)
+-------------------
+
+* Pools can be used as context managers; see the sketch after this list.
+  (Issue #545)
+
+* Don't re-use connections which experienced an SSLError. (Issue #529)
+
+* Don't fail when gzip decoding an empty stream. (Issue #535)
+
+* Add sha256 support for fingerprint verification. (Issue #540)
+
+* Fixed handling of header values containing commas. (Issue #533)
+
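+A minimal sketch of the context-manager support (the hostname is
+illustrative):
+
+.. code-block:: python
+
+    import urllib3
+
+    # Connections are closed automatically when the block exits.
+    with urllib3.HTTPConnectionPool("example.com") as pool:
+        r = pool.request("GET", "/")
+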
+
+1.10 (2014-12-14)
+-----------------
+
+* Disabled SSLv3. (Issue #473)
+
+* Add ``Url.url`` property to return the composed url string. (Issue #394)
+
+* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
+
+* ``MaxRetryError.reason`` will always be an exception, not string.
+  (Issue #481)
+
+* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
+
+* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
+
+* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
+  (Issue #496)
+
+* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
+  (Issue #499)
+
+* Close and discard sockets which experienced SSL-related errors.
+  (Issue #501)
+
+* Handle ``body`` param in ``.request(...)``. (Issue #513)
+
+* Respect timeout with HTTPS proxy. (Issue #505)
+
+* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
+
+
+1.9.1 (2014-09-13)
+------------------
+
+* Apply socket arguments before binding. (Issue #427)
+
+* More careful checks if fp-like object is closed. (Issue #435)
+
+* Fixed packaging issues of some development-related files not
+  getting included. (Issue #440)
+
+* Allow performing *only* fingerprint verification. (Issue #444)
+
+* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
+
+* Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
+
+* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
+  (Issue #443)
+
+
+
+1.9 (2014-07-04)
+----------------
+
+* Shuffled around development-related files. If you're maintaining a distro
+  package of urllib3, you may need to tweak things. (Issue #415)
+
+* Unverified HTTPS requests will trigger a warning on the first request. See
+  our new `security documentation
+  <https://urllib3.readthedocs.io/en/latest/security.html>`_ for details.
+  (Issue #426)
+
+* New retry logic and ``urllib3.util.retry.Retry`` configuration object;
+  see the sketch after this list. (Issue #326)
+
+* All raised exceptions should now be wrapped in a
+  ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
+
+* All errors during a retry-enabled request should be wrapped in
+  ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
+  which were previously exempt. Underlying error is accessible from the
+  ``.reason`` property. (Issue #326)
+
+* ``urllib3.exceptions.ConnectionError`` renamed to
+  ``urllib3.exceptions.ProtocolError``. (Issue #326)
+
+* Errors during response read (such as IncompleteRead) are now wrapped in
+  ``urllib3.exceptions.ProtocolError``. (Issue #418)
+
+* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
+  (Issue #417)
+
+* Catch read timeouts over SSL connections as
+  ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
+
+* Apply socket arguments before connecting. (Issue #427)
+
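+A minimal sketch of the ``Retry`` configuration object (the retry count and
+backoff factor are illustrative):
+
+.. code-block:: python
+
+    import urllib3
+    from urllib3.util.retry import Retry
+
+    http = urllib3.PoolManager(retries=Retry(total=3, backoff_factor=0.5))
+    r = http.request("GET", "http://example.com/")
+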
+
+1.8.3 (2014-06-23)
+------------------
+
+* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
+
+* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
+
+* Wrap ``socket.timeout`` exception with
+  ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
+
+* Fixed proxy-related bug where connections were being reused incorrectly.
+  (Issues #366, #369)
+
+* Added ``socket_options`` keyword parameter which allows defining the
+  ``setsockopt`` configuration of new sockets. (Issue #397)
+
+* Removed ``HTTPConnection.tcp_nodelay`` in favor of
+  ``HTTPConnection.default_socket_options``. (Issue #397)
+
+* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
+
+
+1.8.2 (2014-04-17)
+------------------
+
+* Fix ``urllib3.util`` not being included in the package.
+
+
+1.8.1 (2014-04-17)
+------------------
+
+* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
+
+* Don't install ``dummyserver`` into ``site-packages`` as it's only needed
+  for the test suite. (Issue #362)
+
+* Added support for specifying ``source_address``. (Issue #352)
+
+
+1.8 (2014-03-04)
+----------------
+
+* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
+  username, and blank ports like 'hostname:').
+
+* New ``urllib3.connection`` module which contains all the HTTPConnection
+  objects.
+
+* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
+  signature to a more sensible order. [Backwards incompatible]
+  (Issues #252, #262, #263)
+
+* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
+
+* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
+  returns the number of bytes read so far. (Issue #277)
+
+* Support for platforms without threading. (Issue #289)
+
+* Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
+  to allow a pool with no specified port to be considered equal to an
+  HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
+
+* Improved default SSL/TLS settings to avoid vulnerabilities.
+  (Issue #309)
+
+* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
+  (Issue #310)
+
+* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
+  will send the entire HTTP request ~200 milliseconds faster; however, some of
+  the resulting TCP packets will be smaller. (Issue #254)
+
+* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
+  from the default 64 to 1024 in a single certificate. (Issue #318)
+
+* Headers are now passed and stored as a custom
+  ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
+  (Issue #329, #333)
+
+* Headers no longer lose their case on Python 3. (Issue #236)
+
+* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
+  certificates on inject. (Issue #332)
+
+* Requests with ``retries=False`` will immediately raise any exceptions without
+  wrapping them in ``MaxRetryError``. (Issue #348)
+
+* Fixed open socket leak with SSL-related failures. (Issue #344, #348)
+
+
+1.7.1 (2013-09-25)
+------------------
+
+* Added granular timeout support with new ``urllib3.util.Timeout`` class;
+  see the sketch after this list. (Issue #231)
+
+* Fixed Python 3.4 support. (Issue #238)
+
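+A minimal sketch of the granular ``Timeout`` class (the budgets, in seconds,
+are illustrative):
+
+.. code-block:: python
+
+    import urllib3
+    from urllib3.util import Timeout
+
+    # Separate connect and read budgets instead of one blanket value.
+    http = urllib3.PoolManager(timeout=Timeout(connect=2.0, read=5.0))
+    r = http.request("GET", "http://example.com/")
+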
+
+1.7 (2013-08-14)
+----------------
+
+* More exceptions are now pickle-able, with tests. (Issue #174)
+
+* Fixed redirecting with relative URLs in Location header. (Issue #178)
+
+* Support for relative urls in ``Location: ...`` header. (Issue #179)
+
+* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
+  file-like functionality. (Issue #187)
+
+* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will
+  skip hostname verification for SSL connections. (Issue #194)
+
+* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
+  generator wrapped around ``.read(...)``; see the sketch after this list.
+  (Issue #198)
+
+* IPv6 url parsing enforces brackets around the hostname. (Issue #199)
+
+* Fixed thread race condition in
+  ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
+
+* ``ProxyManager`` requests now include non-default port in ``Host: ...``
+  header. (Issue #217)
+
+* Added HTTPS proxy support in ``ProxyManager``. (Issues #170, #139)
+
+* New ``RequestField`` object can be passed to the ``fields=...`` param which
+  can specify headers. (Issue #220)
+
+* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.
+  (Issue #221)
+
+* Use international headers when posting file names. (Issue #119)
+
+* Improved IPv6 support. (Issue #203)
+
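+A minimal sketch of the ``stream(...)`` generator (the URL and chunk size are
+illustrative):
+
+.. code-block:: python
+
+    import urllib3
+
+    http = urllib3.PoolManager()
+    r = http.request("GET", "http://example.com/", preload_content=False)
+    chunks = []
+    for chunk in r.stream(1024):  # yields up to 1024 bytes at a time
+        chunks.append(chunk)
+    r.release_conn()
+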
+
+1.6 (2013-04-25)
+----------------
+
+* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)
+
+* ``ProxyManager`` automatically adds ``Host: ...`` header if not given.
+
+* Improved SSL-related code. ``cert_req`` now optionally takes a string like
+  "REQUIRED" or "NONE", and ``ssl_version`` likewise takes strings like
+  "SSLv23". The string values reflect the suffix of the respective constant
+  variable. (Issue #130)
+
+* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly
+  closed proxy connections and larger read buffers. (Issue #135)
+
+* Ensure the connection is closed if no data is received, fixes connection leak
+  on some platforms. (Issue #133)
+
+* Added SNI support for SSL/TLS connections on Py32+. (Issue #89)
+
+* Tests fixed to be compatible with Py26 again. (Issue #125)
+
+* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant
+  to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109)
+
+* Allow an explicit content type to be specified when encoding file fields.
+  (Issue #126)
+
+* Exceptions are now pickleable, with tests. (Issue #101)
+
+* Fixed default headers not getting passed in some cases. (Issue #99)
+
+* Treat "content-encoding" header value as case-insensitive, per RFC 2616
+  Section 3.5. (Issue #110)
+
+* "Connection Refused" SocketErrors will get retried rather than raised.
+  (Issue #92)
+
+* Updated vendored ``six``, no longer overrides the global ``six`` module
+  namespace. (Issue #113)
+
+* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding
+  the exception that prompted the final retry. If ``reason is None`` then it
+  was due to a redirect. (Issue #92, #114)
+
+* Fixed ``PoolManager.urlopen()`` from not redirecting more than once.
+  (Issue #149)
+
+* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters
+  that are not files. (Issue #111)
+
+* Pass ``strict`` param down to ``httplib.HTTPConnection``. (Issue #122)
+
+* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or
+  against an arbitrary hostname (when connecting by IP or for misconfigured
+  servers); see the sketch after this list. (Issue #140)
+
+* Streaming decompression support. (Issue #159)
+
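+A hedged sketch of fingerprint-only verification (the address and digest are
+placeholders, not real values):
+
+.. code-block:: python
+
+    from urllib3 import HTTPSConnectionPool
+
+    # The certificate must match this digest exactly for requests to succeed.
+    pool = HTTPSConnectionPool(
+        "10.0.0.1",
+        assert_fingerprint="AA:BB:CC:DD:EE:FF:00:11:22:33:44:55:66:77:88:99:AA:BB:CC:DD",
+    )
+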
+
+1.5 (2012-08-02)
+----------------
+
+* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug
+  logging in urllib3.
+
+* Native full URL parsing (including auth, path, query, fragment) available
+  in ``urllib3.util.parse_url(url)``; see the sketch after this list.
+
+* Built-in redirect will switch method to 'GET' if status code is 303.
+  (Issue #11)
+
+* ``urllib3.PoolManager`` strips the scheme and host before sending the request
+  uri. (Issue #8)
+
+* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,
+  based on the Content-Type header, fails.
+
+* Fixed bug with pool depletion and leaking connections (Issue #76). Added
+  explicit connection closing on pool eviction. Added
+  ``urllib3.PoolManager.clear()``.
+
+* 99% -> 100% unit test coverage.
+
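+A minimal sketch of ``parse_url`` (the URL is illustrative):
+
+.. code-block:: python
+
+    from urllib3.util import parse_url
+
+    url = parse_url("http://user:pw@example.com:8080/path?q=1#frag")
+    print(url.scheme, url.auth, url.host, url.port)  # http user:pw example.com 8080
+    print(url.path, url.query, url.fragment)         # /path q=1 frag
+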
+
+1.4 (2012-06-16)
+----------------
+
+* Minor AppEngine-related fixes.
+
+* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.
+
+* Improved url parsing. (Issue #73)
+
+* IPv6 url support. (Issue #72)
+
+
+1.3 (2012-03-25)
+----------------
+
+* Removed pre-1.0 deprecated API.
+
+* Refactored helpers into a ``urllib3.util`` submodule.
+
+* Fixed multipart encoding to support list-of-tuples for keys with multiple
+  values. (Issue #48)
+
+* Fixed multiple Set-Cookie headers in response not getting merged properly in
+  Python 3. (Issue #53)
+
+* AppEngine support with Py27. (Issue #61)
+
+* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs
+  bytes.
+
+
+1.2.2 (2012-02-06)
+------------------
+
+* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)
+
+
+1.2.1 (2012-02-05)
+------------------
+
+* Fixed another bug related to when ``ssl`` module is not available. (Issue #41)
+
+* Location parsing errors now raise ``urllib3.exceptions.LocationParseError``
+  which inherits from ``ValueError``.
+
+
+1.2 (2012-01-29)
+----------------
+
+* Added Python 3 support (tested on 3.2.2)
+
+* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2)
+
+* Use ``select.poll`` instead of ``select.select`` for platforms that support
+  it.
+
+* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive
+  connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.
+
+* Fixed ``ImportError`` during install when ``ssl`` module is not available.
+  (Issue #41)
+
+* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not
+  completing properly. (Issue #28, uncovered by Issue #10 in v1.1)
+
+* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +
+  ``eventlet``. Removed extraneous unsupported dummyserver testing backends.
+  Added socket-level tests.
+
+* More tests. Achievement Unlocked: 99% Coverage.
+
+
+1.1 (2012-01-07)
+----------------
+
+* Refactored ``dummyserver`` to its own root namespace module (used for
+  testing).
+
+* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in
+  Py32's ``ssl_match_hostname``. (Issue #25)
+
+* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)
+
+* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue
+  #27)
+
+* Fixed timeout-related bugs. (Issues #17, #23)
+
+
+1.0.2 (2011-11-04)
+------------------
+
+* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if
+  you're using the object manually. (Thanks pyos)
+
+* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by
+  wrapping the access log in a mutex. (Thanks @christer)
+
+* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and
+  ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.
+
+
+1.0.1 (2011-10-10)
+------------------
+
+* Fixed a bug where the same connection would get returned into the pool twice,
+  causing extraneous "HttpConnectionPool is full" log warnings.
+
+
+1.0 (2011-10-08)
+----------------
+
+* Added ``PoolManager`` with LRU expiration of connections (tested and
+  documented).
+* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works
+  with HTTPS proxies).
+* Added optional partial-read support for responses when
+  ``preload_content=False``. You can now make requests and just read the headers
+  without loading the content.
+* Made response decoding optional (default on, same as before).
+* Added optional explicit boundary string for ``encode_multipart_formdata``.
+* Convenience request methods are now inherited from ``RequestMethods``. Old
+  helpers like ``get_url`` and ``post_url`` should be abandoned in favour of
+  the new ``request(method, url, ...)``.
+* Refactored code to be even more decoupled, reusable, and extendable.
+* License header added to ``.py`` files.
+* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
+  and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
+* Embettered all the things!
+* Started writing this file.
+
+
+0.4.1 (2011-07-17)
+------------------
+
+* Minor bug fixes, code cleanup.
+
+
+0.4 (2011-03-01)
+----------------
+
+* Better unicode support.
+* Added ``VerifiedHTTPSConnection``.
+* Added ``NTLMConnectionPool`` in contrib.
+* Minor improvements.
+
+
+0.3.1 (2010-07-13)
+------------------
+
+* Added ``assert_host_name`` optional parameter. Now compatible with proxies.
+
+
+0.3 (2009-12-10)
+----------------
+
+* Added HTTPS support.
+* Minor bug fixes.
+* Refactored, broken backwards compatibility with 0.2.
+* API to be treated as stable from this version forward.
+
+
+0.2 (2008-11-17)
+----------------
+
+* Added unit tests.
+* Bug fixes.
+
+
+0.1 (2008-11-16)
+----------------
+
+* First release.
+
+
diff --git a/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/LICENSE.txt b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/LICENSE.txt
new file mode 100644
index 00000000..429a1767
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/METADATA b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/METADATA
new file mode 100644
index 00000000..f5263b0f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/METADATA
@@ -0,0 +1,1366 @@
+Metadata-Version: 2.0
+Name: urllib3
+Version: 1.26.4
+Summary: HTTP library with thread-safe connection pooling, file post, and more.
+Home-page: https://urllib3.readthedocs.io/
+Author: Andrey Petrov
+Author-email: andrey.petrov@shazow.net
+License: MIT
+Project-URL: Documentation, https://urllib3.readthedocs.io/
+Project-URL: Code, https://github.com/urllib3/urllib3
+Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
+Keywords: urllib httplib threadsafe filepost http https ssl pooling
+Platform: UNKNOWN
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
+Description-Content-Type: text/x-rst
+Provides-Extra: secure
+Provides-Extra: socks
+Provides-Extra: brotli
+Requires-Dist: pyOpenSSL>=0.14; extra == 'secure'
+Requires-Dist: cryptography>=1.3.4; extra == 'secure'
+Requires-Dist: idna>=2.0.0; extra == 'secure'
+Requires-Dist: certifi; extra == 'secure'
+Requires-Dist: ipaddress; python_version=="2.7" and extra == 'secure'
+Requires-Dist: PySocks>=1.5.6,<2.0,!=1.5.7; extra == 'socks'
+Requires-Dist: brotlipy>=0.6.0; extra == 'brotli'
+
+
+urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
+Python ecosystem already uses urllib3 and you should too.
+urllib3 brings many critical features that are missing from the Python
+standard libraries:
+
+- Thread safety.
+- Connection pooling.
+- Client-side SSL/TLS verification.
+- File uploads with multipart encoding.
+- Helpers for retrying requests and dealing with HTTP redirects.
+- Support for gzip, deflate, and brotli encoding.
+- Proxy support for HTTP and SOCKS.
+- 100% test coverage.
+
+urllib3 is powerful and easy to use:
+
+.. code-block:: python
+
+    >>> import urllib3
+    >>> http = urllib3.PoolManager()
+    >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
+    >>> r.status
+    200
+    >>> r.data
+    'User-agent: *\nDisallow: /deny\n'
+
+
+Installing
+----------
+
+urllib3 can be installed with `pip <https://pip.pypa.io>`_::
+
+    $ python -m pip install urllib3
+
+Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
+
+    $ git clone git://github.com/urllib3/urllib3.git
+    $ python setup.py install
+
+
+Documentation
+-------------
+
+urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
+
+
+Contributing
+------------
+
+urllib3 happily accepts contributions. Please see our
+`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
+for some tips on getting started.
+
+
+Security Disclosures
+--------------------
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure with maintainers.
+
+
+Maintainers
+-----------
+
+- `@sethmlarson <https://github.com/sethmlarson>`__ (Seth M. Larson)
+- `@pquentin <https://github.com/pquentin>`__ (Quentin Pradet)
+- `@theacodes <https://github.com/theacodes>`__ (Thea Flowers)
+- `@haikuginger <https://github.com/haikuginger>`__ (Jess Shapiro)
+- `@lukasa <https://github.com/lukasa>`__ (Cory Benfield)
+- `@sigmavirus24 <https://github.com/sigmavirus24>`__ (Ian Stapleton Cordasco)
+- `@shazow <https://github.com/shazow>`__ (Andrey Petrov)
+
+👋
+
+
+Sponsorship
+-----------
+
+If your company benefits from this library, please consider `sponsoring its
+development <https://urllib3.readthedocs.io/en/latest/sponsors.html>`_.
+
+
+For Enterprise
+--------------
+
+.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
+   :width: 75
+   :alt: Tidelift
+
+.. list-table::
+   :widths: 10 100
+
+   * - |tideliftlogo|
+     - Professional support for urllib3 is available as part of the `Tidelift
+       Subscription`_.  Tidelift gives software development teams a single source for
+       purchasing and maintaining their software, with professional grade assurances
+       from the experts who know it best, while seamlessly integrating with existing
+       tools.
+
+.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
+
+
+Changes
+=======
+
+1.26.4 (2021-03-15)
+-------------------
+
+* Changed behavior of the default ``SSLContext`` when connecting to HTTPS proxy
+  during HTTPS requests. The default ``SSLContext`` now sets ``check_hostname=True``.
+
+
+1.26.3 (2021-01-26)
+-------------------
+
+* Fixed bytes and string comparison issue with headers (Pull #2141)
+
+* Changed ``ProxySchemeUnknown`` error message to be
+  more actionable if the user supplies a proxy URL without
+  a scheme. (Pull #2107)
+
+
+1.26.2 (2020-11-12)
+-------------------
+
+* Fixed an issue where ``wrap_socket`` and ``CERT_REQUIRED`` wouldn't
+  be imported properly on Python 2.7.8 and earlier (Pull #2052)
+
+
+1.26.1 (2020-11-11)
+-------------------
+
+* Fixed an issue where two ``User-Agent`` headers would be sent if a
+  ``User-Agent`` header key is passed as ``bytes`` (Pull #2047)
+
+
+1.26.0 (2020-11-10)
+-------------------
+
+* **NOTE: urllib3 v2.0 will drop support for Python 2**.
+  `Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>`_.
+
+* Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
+
+* Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
+  still wish to use TLS earlier than 1.2 without a deprecation warning
+  should opt in explicitly by setting ``ssl_version=ssl.PROTOCOL_TLSv1_1`` (Pull #2002)
+  **Starting in urllib3 v2.0: Connections that receive a ``DeprecationWarning`` will fail**
+
+* Deprecated ``Retry`` options ``Retry.DEFAULT_METHOD_WHITELIST``, ``Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST``
+  and ``Retry(method_whitelist=...)`` in favor of ``Retry.DEFAULT_ALLOWED_METHODS``,
+  ``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT``, and ``Retry(allowed_methods=...)``
+  (Pull #2000) **Starting in urllib3 v2.0: Deprecated options will be removed**
+
+* Added default ``User-Agent`` header to every request (Pull #1750)
+
+* Added ``urllib3.util.SKIP_HEADER`` for skipping ``User-Agent``,
+  ``Accept-Encoding``, and ``Host`` headers from being automatically emitted
+  with requests; see the sketch after this list (Pull #2018)
+
+* Collapse ``transfer-encoding: chunked`` request data and framing into
+  the same ``socket.send()`` call (Pull #1906)
+
+* Send ``http/1.1`` ALPN identifier with every TLS handshake by default (Pull #1894)
+
+* Properly terminate SecureTransport connections when CA verification fails (Pull #1977)
+
+* Don't emit an ``SNIMissingWarning`` when passing ``server_hostname=None``
+  to SecureTransport (Pull #1903)
+
+* Disabled requesting TLSv1.2 session tickets as they weren't being used by urllib3 (Pull #1970)
+
+* Suppress ``BrokenPipeError`` when writing request body after the server
+  has closed the socket (Pull #1524)
+
+* Wrap ``ssl.SSLError`` that can be raised from reading a socket (e.g. "bad MAC")
+  into an ``urllib3.exceptions.SSLError`` (Pull #1939)
+
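+A minimal sketch of ``SKIP_HEADER`` (the URL is illustrative):
+
+.. code-block:: python
+
+    import urllib3
+    from urllib3.util import SKIP_HEADER
+
+    http = urllib3.PoolManager()
+    # Suppress the automatic User-Agent header for this request only.
+    r = http.request(
+        "GET", "http://example.com/", headers={"User-Agent": SKIP_HEADER}
+    )
+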
+
+1.25.11 (2020-10-19)
+--------------------
+
+* Fix retry backoff time parsed from ``Retry-After`` header when given
+  in the HTTP date format. The HTTP date was parsed as the local timezone
+  rather than accounting for the timezone in the HTTP date (typically
+  UTC) (Pull #1932, Pull #1935, Pull #1938, Pull #1949)
+
+* Fix issue where an error would be raised when the ``SSLKEYLOGFILE``
+  environment variable was set to the empty string. Now ``SSLContext.keylog_file``
+  is not set in this situation (Pull #2016)
+
+
+1.25.10 (2020-07-22)
+--------------------
+
+* Added support for the ``SSLKEYLOGFILE`` environment variable for
+  logging TLS session keys for use with programs like
+  Wireshark for decrypting captured web traffic (Pull #1867)
+
+* Fixed loading of SecureTransport libraries on macOS Big Sur
+  due to the new dynamic linker cache (Pull #1905)
+
+* Collapse chunked request body data and framing into one
+  call to ``send()`` to reduce the number of TCP packets by 2-4x (Pull #1906)
+
+* Don't insert ``None`` into ``ConnectionPool`` if the pool
+  was empty when requesting a connection (Pull #1866)
+
+* Avoid ``hasattr`` call in ``BrotliDecoder.decompress()`` (Pull #1858)
+
+
+1.25.9 (2020-04-16)
+-------------------
+
+* Added ``InvalidProxyConfigurationWarning`` which is raised when
+  erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
+  support connecting to HTTPS proxies but will soon be able to, and we
+  would like users to migrate properly without much breakage.
+
+  See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
+  for more information on how to fix your proxy config. (Pull #1851)
+
+* Drain connection after ``PoolManager`` redirect (Pull #1817)
+
+* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
+
+* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
+
+* Allow the CA certificate data to be passed as a string (Pull #1804)
+
+* Raise ``ValueError`` if method contains control characters (Pull #1800)
+
+* Add ``__repr__`` to ``Timeout`` (Pull #1795)
+
+
+1.25.8 (2020-01-20)
+-------------------
+
+* Drop support for EOL Python 3.4 (Pull #1774)
+
+* Optimize _encode_invalid_chars (Pull #1787)
+
+
+1.25.7 (2019-11-11)
+-------------------
+
+* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
+
+* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
+
+* Fix issue where URL fragment was sent within the request target. (Pull #1732)
+
+* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
+
+* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
+
+
+1.25.6 (2019-09-24)
+-------------------
+
+* Fix issue where tilde (``~``) characters were incorrectly
+  percent-encoded in the path. (Pull #1692)
+
+
+1.25.5 (2019-09-19)
+-------------------
+
+* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
+  caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
+  (Issue #1682)
+
+
+1.25.4 (2019-09-19)
+-------------------
+
+* Propagate Retry-After header settings to subsequent retries. (Pull #1607)
+
+* Fix edge case where Retry-After header was still respected even when
+  explicitly opted out of. (Pull #1607)
+
+* Remove dependency on ``rfc3986`` for URL parsing.
+
+* Fix issue where URLs containing invalid characters within ``Url.auth`` would
+  raise an exception instead of percent-encoding those characters.
+
+* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
+  work well with BufferedReaders and other ``io`` module features. (Pull #1652)
+
+* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
+
+
+1.25.3 (2019-05-23)
+-------------------
+
+* Change ``HTTPSConnection`` to load system CA certificates
+  when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
+  unspecified. (Pull #1608, Issue #1603)
+
+* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
+
+
+1.25.2 (2019-04-28)
+-------------------
+
+* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
+
+* Change ``parse_url`` to percent-encode invalid characters within the
+  path, query, and target components. (Pull #1586)
+
+
+1.25.1 (2019-04-24)
+-------------------
+
+* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
+
+* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
+
+
+1.25 (2019-04-22)
+-----------------
+
+* Require and validate certificates by default when using HTTPS (Pull #1507)
+
+* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
+
+* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
+  encrypted ``key_file`` without creating your own ``SSLContext`` object;
+  see the sketch after this list. (Pull #1489)
+
+* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
+  implementations. (Pull #1496)
+
+* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, Pull #1492)
+
+* Fixed issue where OpenSSL would block if an encrypted client private key was
+  given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
+
+* Added support for Brotli content encoding. It is enabled automatically if
+  the ``brotlipy`` package is installed, which can be requested with the
+  ``urllib3[brotli]`` extra. (Pull #1532)
+
+* Drop ciphers using DSS key exchange from default TLS cipher suites.
+  Improve default ciphers when using SecureTransport. (Pull #1496)
+
+* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
+
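+A hedged sketch of ``key_password`` with an encrypted client key (the file
+paths and passphrase are placeholders):
+
+.. code-block:: python
+
+    from urllib3 import HTTPSConnectionPool
+
+    pool = HTTPSConnectionPool(
+        "example.com",
+        cert_file="client.crt",
+        key_file="client.key",
+        key_password="passphrase",
+    )
+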
+1.24.3 (2019-05-01)
+-------------------
+
+* Apply fix for CVE-2019-9740. (Pull #1591)
+
+
+1.24.2 (2019-04-17)
+-------------------
+
+* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
+  ``ssl_context`` parameters are specified.
+
+* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
+
+* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
+
+
+1.24.1 (2018-11-02)
+-------------------
+
+* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
+
+* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
+
+
+1.24 (2018-10-16)
+-----------------
+
+* Allow ``key_server_hostname`` to be specified when initializing a PoolManager
+  so that the SNI hostname can be overridden with a custom value. (Pull #1449)
+
+* Test against Python 3.7 on AppVeyor. (Pull #1453)
+
+* Early-out ipv6 checks when running on App Engine. (Pull #1450)
+
+* Change ambiguous description of backoff_factor (Pull #1436)
+
+* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
+
+* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
+
+* Add a server_hostname parameter to HTTPSConnection which allows for
+  overriding the SNI hostname sent in the handshake. (Pull #1397)
+
+* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
+
+* Fixed bug where responses with header Content-Type: message/* erroneously
+  raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
+
+* Move urllib3 to src/urllib3 (Pull #1409)
+
+
+1.23 (2018-06-04)
+-----------------
+
+* Allow providing a list of headers to strip from requests when redirecting
+  to a different host. Defaults to the ``Authorization`` header. Different
+  headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
+
+* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
+
+* Dropped Python 3.3 support. (Pull #1242)
+
+* Put the connection back in the pool when calling stream() or read_chunked() on
+  a chunked HEAD response. (Issue #1234)
+
+* Fixed pyOpenSSL-specific ssl client authentication issue when clients
+  attempted to auth via certificate + chain (Issue #1060)
+
+* Add the port to the connection pool connect log message (Pull #1251)
+
+* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
+
+* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
+
+* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
+
+* Added support for auth info in url for SOCKS proxy (Pull #1363)
+
+
+1.22 (2017-07-20)
+-----------------
+
+* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via
+  IPv6 proxy. (Issue #1222)
+
+* Made the connection pool retry on ``SSLError``.  The original ``SSLError``
+  is available on ``MaxRetryError.reason``. (Issue #1112)
+
+* Drain and release connection before recursing on retry/redirect.  Fixes
+  deadlocks with a blocking connectionpool. (Issue #1167)
+
+* Fixed compatibility for cookiejar. (Issue #1229)
+
+* pyopenssl: Use vendored version of ``six``. (Issue #1231)
+
+
+1.21.1 (2017-05-02)
+-------------------
+
+* Fixed SecureTransport issue that would cause long delays in response body
+  delivery. (Pull #1154)
+
+* Fixed regression in 1.21 that threw exceptions when users passed the
+  ``socket_options`` flag to the ``PoolManager``.  (Issue #1165)
+
+* Fixed regression in 1.21 that threw exceptions when users passed the
+  ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.
+  (Pull #1157)
+
+
+1.21 (2017-04-25)
+-----------------
+
+* Improved performance of certain selector system calls on Python 3.5 and
+  later. (Pull #1095)
+
+* Resolved issue where the PyOpenSSL backend would not wrap SysCallError
+  exceptions appropriately when sending data. (Pull #1125)
+
+* Selectors now detect a monkey-patched select module after import, covering
+  modules such as eventlet and greenlet that patch it. (Pull #1128)
+
+* Reduced memory consumption when streaming zlib-compressed responses
+  (as opposed to raw deflate streams). (Pull #1129)
+
+* Connection pools now use the entire request context when constructing the
+  pool key. (Pull #1016)
+
+* ``PoolManager.connection_from_*`` methods now accept a new keyword argument,
+  ``pool_kwargs``, which is merged with the existing ``connection_pool_kw``;
+  see the sketch after this list. (Pull #1016)
+
+* Add retry counter for ``status_forcelist``. (Issue #1147)
+
+* Added ``contrib`` module for using SecureTransport on macOS:
+  ``urllib3.contrib.securetransport``.  (Pull #1122)
+
+* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:
+  for schemes it does not recognise, it assumes they are case-sensitive and
+  leaves them unchanged.
+  (Issue #1080)
+
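+A minimal sketch of ``pool_kwargs`` (the host and ``maxsize`` value are
+illustrative):
+
+.. code-block:: python
+
+    import urllib3
+
+    http = urllib3.PoolManager()
+    # Per-pool overrides are merged into the manager's connection_pool_kw.
+    pool = http.connection_from_host(
+        "example.com", port=443, scheme="https", pool_kwargs={"maxsize": 25}
+    )
+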
+
+1.20 (2017-01-19)
+-----------------
+
+* Added support for waiting for I/O using selectors other than select,
+  improving urllib3's behaviour with large numbers of concurrent connections.
+  (Pull #1001)
+
+* Updated the date for the system clock check. (Issue #1005)
+
+* ConnectionPools now correctly consider hostnames to be case-insensitive.
+  (Issue #1032)
+
+* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module
+  to fail when it is injected, rather than at first use. (Pull #1063)
+
+* Outdated versions of cryptography now cause the PyOpenSSL contrib module
+  to fail when it is injected, rather than at first use. (Issue #1044)
+
+* Automatically attempt to rewind a file-like body object when a request is
+  retried or redirected. (Pull #1039)
+
+* Fix some bugs that occur when modules incautiously patch the queue module.
+  (Pull #1061)
+
+* Prevent retries from occurring on read timeouts for which the request method
+  was not in the method whitelist. (Issue #1059)
+
+* Changed the PyOpenSSL contrib module to lazily load idna to avoid
+  unnecessarily bloating the memory of programs that don't need it. (Pull
+  #1076)
+
+* Add support for IPv6 literals with zone identifiers. (Pull #1013)
+
+* Added support for socks5h:// and socks4a:// schemes when working with SOCKS
+  proxies, and controlled remote DNS appropriately. (Issue #1035)
+
+
+1.19.1 (2016-11-16)
+-------------------
+
+* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)
+
+
+1.19 (2016-11-03)
+-----------------
+
+* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when
+  using the default retry logic; see the sketch after this list. (Pull #955)
+
+* Remove markers from setup.py to assist ancient setuptools versions. (Issue
+  #986)
+
+* Disallow superscripts and other integerish things in URL ports. (Issue #989)
+
+* Allow urllib3's HTTPResponse.stream() method to continue to work with
+  non-httplib underlying FPs. (Pull #990)
+
+* Empty filenames in multipart headers are now emitted as such, rather than
+  being suppressed. (Issue #1015)
+
+* Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
+
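+A minimal sketch of opting into status-based retries, where the default logic
+honours any ``Retry-After`` header (status codes and counts are illustrative):
+
+.. code-block:: python
+
+    import urllib3
+    from urllib3.util.retry import Retry
+
+    retries = Retry(total=5, status_forcelist=[413, 429, 503])
+    http = urllib3.PoolManager(retries=retries)
+    r = http.request("GET", "http://example.com/")
+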
+
+1.18.1 (2016-10-27)
+-------------------
+
+* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with
+  PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This
+  release fixes a vulnerability whereby urllib3 in the above configuration
+  would silently fail to validate TLS certificates due to erroneously setting
+  invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous
+  flags do not cause a problem in OpenSSL versions before 1.1.0, which
+  interprets the presence of any flag as requesting certificate validation.
+
+  There is no PR for this patch, as it was prepared for simultaneous disclosure
+  and release. The master branch received the same fix in Pull #1010.
+
+
+1.18 (2016-09-26)
+-----------------
+
+* Fixed incorrect message for IncompleteRead exception. (Pull #973)
+
+* Accept ``iPAddress`` subject alternative name fields in TLS certificates.
+  (Issue #258)
+
+* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.
+  (Issue #977)
+
+* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)
+
+
+1.17 (2016-09-06)
+-----------------
+
+* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)
+
+* ConnectionPool debug log now includes scheme, host, and port. (Issue #897)
+
+* Substantially refactored documentation. (Issue #887)
+
+* Used URLFetch default timeout on AppEngine, rather than hardcoding our own.
+  (Issue #858)
+
+* Normalize the scheme and host in the URL parser. (Issue #833)
+
+* ``HTTPResponse`` contains the last ``Retry`` object, which now also
+  contains retries history. (Issue #848)
+
+* Timeout can no longer be set as boolean, and must be greater than zero.
+  (Pull #924)
+
+* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
+  now use cryptography and idna, both of which are already dependencies of
+  PyOpenSSL. (Pull #930)
+
+* Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
+
+* Try to use the operating system's certificates when we are using an
+  ``SSLContext``. (Pull #941)
+
+* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
+  ChaCha20, but ChaCha20 is then preferred to everything else. (Pull #947)
+
+* Updated cipher suite list to remove 3DES-based cipher suites. (Pull #958)
+
+* Removed the cipher suite fallback to allow HIGH ciphers. (Pull #958)
+
+* Implemented ``length_remaining`` to determine remaining content
+  to be read. (Pull #949)
+
+* Implemented ``enforce_content_length`` to enable exceptions when
+  incomplete data chunks are received. (Pull #949)
+
+* Lowered the log level of connection start, dropped-connection reset,
+  redirect, forced retry, and new HTTPS connection messages from INFO to
+  DEBUG. (Pull #967)
+
+
+1.16 (2016-06-11)
+-----------------
+
+* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)
+
+* Provide ``key_fn_by_scheme`` pool keying mechanism that can be
+  overridden. (Issue #830)
+
+* Normalize scheme and host to lowercase for pool keys, and include
+  ``source_address``. (Issue #830)
+
+* Cleaner exception chain in Python 3 for ``_make_request``.
+  (Issue #861)
+
+* Fixed installing ``urllib3[socks]`` extra. (Issue #864)
+
+* Fixed signature of ``ConnectionPool.close`` so it can actually safely be
+  called by subclasses. (Issue #873)
+
+* Retain ``release_conn`` state across retries. (Issues #651, #866)
+
+* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to
+  ``HTTPResponse`` but can be replaced with a subclass. (Issue #879)
+
+
+1.15.1 (2016-04-11)
+-------------------
+
+* Fix packaging to include backports module. (Issue #841)
+
+
+1.15 (2016-04-06)
+-----------------
+
+* Added Retry(raise_on_status=False). (Issue #720)
+
+* Always use setuptools, no more distutils fallback. (Issue #785)
+
+* Dropped support for Python 3.2. (Issue #786)
+
+* Chunked transfer encoding when requesting with ``chunked=True``.
+  (Issue #790)
+
+* Fixed regression with IPv6 port parsing. (Issue #801)
+
+* Append SNIMissingWarning messages to allow users to specify it in
+  the PYTHONWARNINGS environment variable. (Issue #816)
+
+* Handle unicode headers in Py2. (Issue #818)
+
+* Log certificate when there is a hostname mismatch. (Issue #820)
+
+* Preserve order of request/response headers. (Issue #821)
+
+
+1.14 (2015-12-29)
+-----------------
+
+* contrib: SOCKS proxy support! (Issue #762)
+
+* Fixed AppEngine handling of transfer-encoding header and bug
+  in Timeout defaults checking. (Issue #763)
+
+
+1.13.1 (2015-12-18)
+-------------------
+
+* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
+
+
+1.13 (2015-12-14)
+-----------------
+
+* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
+
+* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
+
+* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
+
+* Close connections more defensively on exception. (Issue #734)
+
+* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
+  repeatedly flushing the decoder, to function better on Jython. (Issue #743)
+
+* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
+
+
+1.12 (2015-09-03)
+-----------------
+
+* Rely on ``six`` for importing ``httplib`` to work around
+  conflicts with other Python 3 shims. (Issue #688)
+
+* Add support for directories of certificate authorities, as supported by
+  OpenSSL. (Issue #701)
+
+* New exception: ``NewConnectionError``, raised when we fail to establish
+  a new connection, usually an ``ECONNREFUSED`` socket error.
+
+
+1.11 (2015-07-21)
+-----------------
+
+* When ``ca_certs`` is given, ``cert_reqs`` defaults to
+  ``'CERT_REQUIRED'``. (Issue #650)
+
+* ``pip install urllib3[secure]`` will install Certifi and
+  PyOpenSSL as dependencies. (Issue #678)
+
+* Made ``HTTPHeaderDict`` usable as a ``headers`` input value.
+  (Issues #632, #679)
+
+* Added `urllib3.contrib.appengine <https://urllib3.readthedocs.io/en/latest/contrib.html#google-app-engine>`_
+  which has an ``AppEngineManager`` for using ``URLFetch`` in a
+  Google AppEngine environment. (Issue #664)
+
+* Dev: Added test suite for AppEngine. (Issue #631)
+
+* Fix performance regression when using PyOpenSSL. (Issue #626)
+
+* Passing incorrect scheme (e.g. ``foo://``) will raise
+  ``ValueError`` instead of ``AssertionError`` (backwards
+  compatible for now, but please migrate). (Issue #640)
+
+* Fix pools not getting replenished when an error occurs during a
+  request using ``release_conn=False``. (Issue #644)
+
+* Fix pool-default headers not applying for url-encoded requests
+  like GET. (Issue #657)
+
+* log.warning in Python 3 when headers are skipped due to parsing
+  errors. (Issue #642)
+
+* Close and discard connections if an error occurs during read.
+  (Issue #660)
+
+* Fix host parsing for IPv6 proxies. (Issue #668)
+
+* Separate warning type SubjectAltNameWarning, now issued once
+  per host. (Issue #671)
+
+* Fix ``httplib.IncompleteRead`` not getting converted to
+  ``ProtocolError`` when using ``HTTPResponse.stream()``
+  (Issue #674)
+
+
+1.10.4 (2015-05-03)
+-------------------
+
+* Migrate tests to Tornado 4. (Issue #594)
+
+* Append default warning configuration rather than overwrite.
+  (Issue #603)
+
+* Fix streaming decoding regression. (Issue #595)
+
+* Fix chunked requests losing state across keep-alive connections.
+  (Issue #599)
+
+* Fix hanging when chunked HEAD response has no body. (Issue #605)
+
+
+1.10.3 (2015-04-21)
+-------------------
+
+* Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
+  (Issue #558)
+
+* Fix regression of duplicate header keys being discarded.
+  (Issue #563)
+
+* ``Response.stream()`` returns a generator for chunked responses.
+  (Issue #560)
+
+* Set upper-bound timeout when waiting for a socket in PyOpenSSL.
+  (Issue #585)
+
+* Work on platforms without ``ssl`` module for plain HTTP requests.
+  (Issue #587)
+
+* Stop relying on the stdlib's default cipher list. (Issue #588)
+
+
+1.10.2 (2015-02-25)
+-------------------
+
+* Fix file descriptor leakage on retries. (Issue #548)
+
+* Removed RC4 from default cipher list. (Issue #551)
+
+* Header performance improvements. (Issue #544)
+
+* Fix PoolManager not obeying redirect retry settings. (Issue #553)
+
+
+1.10.1 (2015-02-10)
+-------------------
+
+* Pools can be used as context managers. (Issue #545)
+
+* Don't re-use connections which experienced an SSLError. (Issue #529)
+
+* Don't fail when gzip decoding an empty stream. (Issue #535)
+
+* Add sha256 support for fingerprint verification. (Issue #540)
+
+* Fixed handling of header values containing commas. (Issue #533)
+
+
+1.10 (2014-12-14)
+-----------------
+
+* Disabled SSLv3. (Issue #473)
+
+* Add ``Url.url`` property to return the composed url string. (Issue #394)
+
+* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
+
+* ``MaxRetryError.reason`` will always be an exception, not string.
+  (Issue #481)
+
+* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
+
+* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
+
+* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
+  (Issue #496)
+
+* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
+  (Issue #499)
+
+* Close and discard sockets which experienced SSL-related errors.
+  (Issue #501)
+
+* Handle ``body`` param in ``.request(...)``. (Issue #513)
+
+* Respect timeout with HTTPS proxy. (Issue #505)
+
+* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
+
+
+1.9.1 (2014-09-13)
+------------------
+
+* Apply socket arguments before binding. (Issue #427)
+
+* More careful checks if fp-like object is closed. (Issue #435)
+
+* Fixed packaging issues of some development-related files not
+  getting included. (Issue #440)
+
+* Allow performing *only* fingerprint verification. (Issue #444)
+
+* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
+
+* Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
+
+* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
+  (Issue #443)
+
+
+
+1.9 (2014-07-04)
+----------------
+
+* Shuffled around development-related files. If you're maintaining a distro
+  package of urllib3, you may need to tweak things. (Issue #415)
+
+* Unverified HTTPS requests will trigger a warning on the first request. See
+  our new `security documentation
+  <https://urllib3.readthedocs.io/en/latest/security.html>`_ for details.
+  (Issue #426)
+
+* New retry logic and ``urllib3.util.retry.Retry`` configuration object.
+  (Issue #326)
+
+* All raised exceptions should now be wrapped in a
+  ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
+
+* All errors during a retry-enabled request should be wrapped in
+  ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
+  which were previously exempt. Underlying error is accessible from the
+  ``.reason`` property. (Issue #326)
+
+* ``urllib3.exceptions.ConnectionError`` renamed to
+  ``urllib3.exceptions.ProtocolError``. (Issue #326)
+
+* Errors during response read (such as IncompleteRead) are now wrapped in
+  ``urllib3.exceptions.ProtocolError``. (Issue #418)
+
+* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
+  (Issue #417)
+
+* Catch read timeouts over SSL connections as
+  ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
+
+* Apply socket arguments before connecting. (Issue #427)
+
+
+1.8.3 (2014-06-23)
+------------------
+
+* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
+
+* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
+
+* Wrap ``socket.timeout`` exception with
+  ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
+
+* Fixed proxy-related bug where connections were being reused incorrectly.
+  (Issues #366, #369)
+
+* Added ``socket_options`` keyword parameter which allows defining the
+  ``setsockopt`` configuration of new sockets. (Issue #397)
+
+* Removed ``HTTPConnection.tcp_nodelay`` in favor of
+  ``HTTPConnection.default_socket_options``. (Issue #397)
+
+* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
+
+
+1.8.2 (2014-04-17)
+------------------
+
+* Fix ``urllib3.util`` not being included in the package.
+
+
+1.8.1 (2014-04-17)
+------------------
+
+* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
+
+* Don't install ``dummyserver`` into ``site-packages`` as it's only needed
+  for the test suite. (Issue #362)
+
+* Added support for specifying ``source_address``. (Issue #352)
+
+
+1.8 (2014-03-04)
+----------------
+
+* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
+  username, and blank ports like 'hostname:').
+
+* New ``urllib3.connection`` module which contains all the HTTPConnection
+  objects.
+
+* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
+  signature to a more sensible order. [Backwards incompatible]
+  (Issues #252, #262, #263)
+
+* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
+
+* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
+  returns the number of bytes read so far. (Issue #277)
+
+* Support for platforms without threading. (Issue #289)
+
+* Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
+  to allow a pool with no specified port to be considered equal to an
+  HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
+
+* Improved default SSL/TLS settings to avoid vulnerabilities.
+  (Issue #309)
+
+* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
+  (Issue #310)
+
+* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
+  will send the entire HTTP request ~200 milliseconds faster; however, some of
+  the resulting TCP packets will be smaller. (Issue #254)
+
+* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
+  from the default 64 to 1024 in a single certificate. (Issue #318)
+
+* Headers are now passed and stored as a custom
+  ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
+  (Issue #329, #333)
+
+* Headers no longer lose their case on Python 3. (Issue #236)
+
+* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
+  certificates on inject. (Issue #332)
+
+* Requests with ``retries=False`` will immediately raise any exceptions without
+  wrapping them in ``MaxRetryError``. (Issue #348)
+
+* Fixed open socket leak with SSL-related failures. (Issues #344, #348)
+
+
+1.7.1 (2013-09-25)
+------------------
+
+* Added granular timeout support with new ``urllib3.util.Timeout`` class.
+  (Issue #231)
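+
+  A minimal sketch:
+
+  .. code-block:: python
+
+      import urllib3
+
+      timeout = urllib3.Timeout(connect=2.0, read=7.0)
+      http = urllib3.PoolManager(timeout=timeout)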
+
+* Fixed Python 3.4 support. (Issue #238)
+
+
+1.7 (2013-08-14)
+----------------
+
+* More exceptions are now pickle-able, with tests. (Issue #174)
+
+* Fixed redirecting with relative URLs in Location header. (Issue #178)
+
+* Support for relative urls in ``Location: ...`` header. (Issue #179)
+
+* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
+  file-like functionality. (Issue #187)
+
+* Passing ``assert_hostname=False`` when creating an ``HTTPSConnectionPool``
+  will skip hostname verification for SSL connections. (Issue #194)
+
+* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
+  generator wrapped around ``.read(...)``. (Issue #198)
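+
+  A minimal sketch (the URL is a placeholder):
+
+  .. code-block:: python
+
+      import urllib3
+
+      http = urllib3.PoolManager()
+      resp = http.request("GET", "https://example.com/", preload_content=False)
+      for chunk in resp.stream(1024):
+          print(len(chunk))
+      resp.release_conn()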
+
+* IPv6 url parsing enforces brackets around the hostname. (Issue #199)
+
+* Fixed thread race condition in
+  ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
+
+* ``ProxyManager`` requests now include non-default port in ``Host: ...``
+  header. (Issue #217)
+
+* Added HTTPS proxy support in ``ProxyManager``. (Issues #170, #139)
+
+* New ``RequestField`` object can be passed to the ``fields=...`` param which
+  can specify headers. (Issue #220)
+
+* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.
+  (Issue #221)
+
+* Use international headers when posting file names. (Issue #119)
+
+* Improved IPv6 support. (Issue #203)
+
+
+1.6 (2013-04-25)
+----------------
+
+* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)
+
+* ``ProxyManager`` automatically adds ``Host: ...`` header if not given.
+
+* Improved SSL-related code. ``cert_req`` now optionally takes a string like
+  "REQUIRED" or "NONE", and ``ssl_version`` likewise takes strings like
+  "SSLv23". The string values reflect the suffix of the respective constant
+  variable. (Issue #130)
+
+* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly
+  closed proxy connections and larger read buffers. (Issue #135)
+
+* Ensure the connection is closed if no data is received; fixes a connection
+  leak on some platforms. (Issue #133)
+
+* Added SNI support for SSL/TLS connections on Py32+. (Issue #89)
+
+* Tests fixed to be compatible with Py26 again. (Issue #125)
+
+* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant
+  to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109)
+
+* Allow an explicit content type to be specified when encoding file fields.
+  (Issue #126)
+
+* Exceptions are now pickleable, with tests. (Issue #101)
+
+* Fixed default headers not getting passed in some cases. (Issue #99)
+
+* Treat "content-encoding" header value as case-insensitive, per RFC 2616
+  Section 3.5. (Issue #110)
+
+* "Connection Refused" SocketErrors will get retried rather than raised.
+  (Issue #92)
+
+* Updated vendored ``six``; it no longer overrides the global ``six`` module
+  namespace. (Issue #113)
+
+* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding
+  the exception that prompted the final retry. If ``reason is None`` then it
+  was due to a redirect. (Issue #92, #114)
+
+* Fixed ``PoolManager.urlopen()`` not redirecting more than once.
+  (Issue #149)
+
+* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters
+  that are not files. (Issue #111)
+
+* Pass ``strict`` param down to ``httplib.HTTPConnection``. (Issue #122)
+
+* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or
+  against an arbitrary hostname (when connecting by IP or for misconfigured
+  servers). (Issue #140)
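+
+  A minimal sketch (the host and fingerprint value are placeholders):
+
+  .. code-block:: python
+
+      import urllib3
+
+      pool = urllib3.HTTPSConnectionPool(
+          "example.com",
+          443,
+          assert_fingerprint="AA:BB:CC:DD:EE:FF:00:11:22:33:44:55:66:77:88:99:AA:BB:CC:DD",
+      )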
+
+* Streaming decompression support. (Issue #159)
+
+
+1.5 (2012-08-02)
+----------------
+
+* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug
+  logging in urllib3.
+
+* Native full URL parsing (including auth, path, query, fragment) available in
+  ``urllib3.util.parse_url(url)``.
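+
+  A minimal sketch:
+
+  .. code-block:: python
+
+      from urllib3.util import parse_url
+
+      url = parse_url("https://user@example.com:8080/path?q=1#frag")
+      # url.scheme == "https", url.auth == "user", url.host == "example.com"
+      # url.port == 8080, url.path == "/path", url.query == "q=1"
+      # url.fragment == "frag"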
+
+* Built-in redirect will switch method to 'GET' if status code is 303.
+  (Issue #11)
+
+* ``urllib3.PoolManager`` strips the scheme and host before sending the request
+  uri. (Issue #8)
+
+* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,
+  based on the Content-Type header, fails.
+
+* Fixed bug with pool depletion and leaking connections (Issue #76). Added
+  explicit connection closing on pool eviction. Added
+  ``urllib3.PoolManager.clear()``.
+
+* 99% -> 100% unit test coverage.
+
+
+1.4 (2012-06-16)
+----------------
+
+* Minor AppEngine-related fixes.
+
+* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.
+
+* Improved url parsing. (Issue #73)
+
+* IPv6 url support. (Issue #72)
+
+
+1.3 (2012-03-25)
+----------------
+
+* Removed pre-1.0 deprecated API.
+
+* Refactored helpers into a ``urllib3.util`` submodule.
+
+* Fixed multipart encoding to support list-of-tuples for keys with multiple
+  values. (Issue #48)
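+
+  A minimal sketch:
+
+  .. code-block:: python
+
+      from urllib3.filepost import encode_multipart_formdata
+
+      body, content_type = encode_multipart_formdata(
+          [("tag", "python"), ("tag", "urllib3")]
+      )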
+
+* Fixed multiple Set-Cookie headers in response not getting merged properly in
+  Python 3. (Issue #53)
+
+* AppEngine support with Py27. (Issue #61)
+
+* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs
+  bytes.
+
+
+1.2.2 (2012-02-06)
+------------------
+
+* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)
+
+
+1.2.1 (2012-02-05)
+------------------
+
+* Fixed another bug related to when ``ssl`` module is not available. (Issue #41)
+
+* Location parsing errors now raise ``urllib3.exceptions.LocationParseError``
+  which inherits from ``ValueError``.
+
+
+1.2 (2012-01-29)
+----------------
+
+* Added Python 3 support (tested on 3.2.2).
+
+* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2).
+
+* Use ``select.poll`` instead of ``select.select`` for platforms that support
+  it.
+
+* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive
+  connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.
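+
+  A sketch of overriding it with the Python 3 ``queue`` module (FIFO instead
+  of the default LIFO):
+
+  .. code-block:: python
+
+      import queue
+
+      from urllib3 import HTTPConnectionPool
+
+      class FIFOConnectionPool(HTTPConnectionPool):
+          QueueCls = queue.Queue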
+
+* Fixed ``ImportError`` during install when ``ssl`` module is not available.
+  (Issue #41)
+
+* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not
+  completing properly. (Issue #28, uncovered by Issue #10 in v1.1)
+
+* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +
+  ``eventlet``. Removed extraneous unsupported dummyserver testing backends.
+  Added socket-level tests.
+
+* More tests. Achievement Unlocked: 99% Coverage.
+
+
+1.1 (2012-01-07)
+----------------
+
+* Refactored ``dummyserver`` to its own root namespace module (used for
+  testing).
+
+* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in
+  Py32's ``ssl_match_hostname``. (Issue #25)
+
+* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)
+
+* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue
+  #27)
+
+* Fixed timeout-related bugs. (Issues #17, #23)
+
+
+1.0.2 (2011-11-04)
+------------------
+
+* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if
+  you're using the object manually. (Thanks pyos)
+
+* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by
+  wrapping the access log in a mutex. (Thanks @christer)
+
+* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and
+  ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.
+
+
+1.0.1 (2011-10-10)
+------------------
+
+* Fixed a bug where the same connection would get returned into the pool twice,
+  causing extraneous "HttpConnectionPool is full" log warnings.
+
+
+1.0 (2011-10-08)
+----------------
+
+* Added ``PoolManager`` with LRU expiration of connections (tested and
+  documented).
+* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works
+  with HTTPS proxies).
+* Added optional partial-read support for responses when
+  ``preload_content=False``. You can now make requests and just read the headers
+  without loading the content.
+* Made response decoding optional (default on, same as before).
+* Added optional explicit boundary string for ``encode_multipart_formdata``.
+* Convenience request methods are now inherited from ``RequestMethods``. Old
+  helpers like ``get_url`` and ``post_url`` should be abandoned in favour of
+  the new ``request(method, url, ...)``.
+* Refactored code to be even more decoupled, reusable, and extendable.
+* License header added to ``.py`` files.
+* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
+  and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
+* Embettered all the things!
+* Started writing this file.
+
+
+0.4.1 (2011-07-17)
+------------------
+
+* Minor bug fixes, code cleanup.
+
+
+0.4 (2011-03-01)
+----------------
+
+* Better unicode support.
+* Added ``VerifiedHTTPSConnection``.
+* Added ``NTLMConnectionPool`` in contrib.
+* Minor improvements.
+
+
+0.3.1 (2010-07-13)
+------------------
+
+* Added ``assert_host_name`` optional parameter. Now compatible with proxies.
+
+
+0.3 (2009-12-10)
+----------------
+
+* Added HTTPS support.
+* Minor bug fixes.
+* Refactored; broke backwards compatibility with 0.2.
+* API to be treated as stable from this version forward.
+
+
+0.2 (2008-11-17)
+----------------
+
+* Added unit tests.
+* Bug fixes.
+
+
+0.1 (2008-11-16)
+----------------
+
+* First release.
+
+
diff --git a/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/RECORD b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/RECORD
new file mode 100644
index 00000000..4be6cb09
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/RECORD
@@ -0,0 +1,86 @@
+urllib3-1.26.4.dist-info/DESCRIPTION.rst,sha256=TGbyfHL8ohj1n6jftCPkAwwOqhGhVRq9qPwuA5Y1v84,41482
+urllib3-1.26.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+urllib3-1.26.4.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115
+urllib3-1.26.4.dist-info/METADATA,sha256=T12b9eyueAbfrrGnerPHjYfaeE3L2mVTjNi0hv50v-0,43414
+urllib3-1.26.4.dist-info/RECORD,,
+urllib3-1.26.4.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110
+urllib3-1.26.4.dist-info/metadata.json,sha256=HYlBdyY2qLAbbE07BuGX7q4HCItrXj_xAK2mkAEVKNQ,1857
+urllib3-1.26.4.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
+urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763
+urllib3/__pycache__/__init__.cpython-37.pyc,,
+urllib3/__pycache__/_collections.cpython-37.pyc,,
+urllib3/__pycache__/_version.cpython-37.pyc,,
+urllib3/__pycache__/connection.cpython-37.pyc,,
+urllib3/__pycache__/connectionpool.cpython-37.pyc,,
+urllib3/__pycache__/exceptions.cpython-37.pyc,,
+urllib3/__pycache__/fields.cpython-37.pyc,,
+urllib3/__pycache__/filepost.cpython-37.pyc,,
+urllib3/__pycache__/poolmanager.cpython-37.pyc,,
+urllib3/__pycache__/request.cpython-37.pyc,,
+urllib3/__pycache__/response.cpython-37.pyc,,
+urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
+urllib3/_version.py,sha256=2Bjk_cB49921PTvereWp8ZR3NhLNoCMAyHSGP-OesLk,63
+urllib3/connection.py,sha256=q-vf_TM3MyRbZcFn3-VCKZBSf0oEhGjv7BFeZm_7kw4,18748
+urllib3/connectionpool.py,sha256=IKoeuJZY9YAYm0GK4q-MXAhyXW0M_FnvabYaNsDIR-E,37133
+urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+urllib3/contrib/__pycache__/__init__.cpython-37.pyc,,
+urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc,,
+urllib3/contrib/__pycache__/appengine.cpython-37.pyc,,
+urllib3/contrib/__pycache__/ntlmpool.cpython-37.pyc,,
+urllib3/contrib/__pycache__/pyopenssl.cpython-37.pyc,,
+urllib3/contrib/__pycache__/securetransport.cpython-37.pyc,,
+urllib3/contrib/__pycache__/socks.cpython-37.pyc,,
+urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
+urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pyc,,
+urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pyc,,
+urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pyc,,
+urllib3/contrib/_securetransport/bindings.py,sha256=E1_7ScsgOchfxneozbAueK7ziCwF35fna4DuDCYJ9_o,17637
+urllib3/contrib/_securetransport/low_level.py,sha256=lgIdsSycqfB0Xm5BiJzXGeIKT7ybCQMFPJAgkcwPa1s,13908
+urllib3/contrib/appengine.py,sha256=7Pxb0tKfDB_LTGPERiswH0qomhDoUUOo5kwybAKLQyE,11010
+urllib3/contrib/ntlmpool.py,sha256=6I95h1_71fzxmoMSNtY0gB8lnyCoVtP_DpqFGj14fdU,4160
+urllib3/contrib/pyopenssl.py,sha256=vgh6j52w9xgwq-3R2kfB5M2JblQATJfKAK3lIAc1kSg,16778
+urllib3/contrib/securetransport.py,sha256=KxGPZk8d4YepWm7Rc-SBt1XrzIfnLKc8JkUVV75XzgE,34286
+urllib3/contrib/socks.py,sha256=DcRjM2l0rQMIyhYrN6r-tnVkY6ZTDxHJlM8_usAkGCA,7097
+urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217
+urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
+urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
+urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
+urllib3/packages/__pycache__/__init__.cpython-37.pyc,,
+urllib3/packages/__pycache__/six.cpython-37.pyc,,
+urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+urllib3/packages/backports/__pycache__/__init__.cpython-37.pyc,,
+urllib3/packages/backports/__pycache__/makefile.cpython-37.pyc,,
+urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
+urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
+urllib3/packages/ssl_match_hostname/__init__.py,sha256=zppezdEQdpGsYerI6mV6MfUYy495JV4mcOWC_GgbljU,757
+urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc,,
+urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pyc,,
+urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679
+urllib3/poolmanager.py,sha256=whzlX6UTEgODMOCy0ZDMUONRBCz5wyIM8Z9opXAY-Lk,19763
+urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985
+urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203
+urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
+urllib3/util/__pycache__/__init__.cpython-37.pyc,,
+urllib3/util/__pycache__/connection.cpython-37.pyc,,
+urllib3/util/__pycache__/proxy.cpython-37.pyc,,
+urllib3/util/__pycache__/queue.cpython-37.pyc,,
+urllib3/util/__pycache__/request.cpython-37.pyc,,
+urllib3/util/__pycache__/response.cpython-37.pyc,,
+urllib3/util/__pycache__/retry.cpython-37.pyc,,
+urllib3/util/__pycache__/ssl_.cpython-37.pyc,,
+urllib3/util/__pycache__/ssltransport.cpython-37.pyc,,
+urllib3/util/__pycache__/timeout.cpython-37.pyc,,
+urllib3/util/__pycache__/url.cpython-37.pyc,,
+urllib3/util/__pycache__/wait.cpython-37.pyc,,
+urllib3/util/connection.py,sha256=21B-LX0c8fkxPDssyHCaK0pCnmrKmhltg5EoouHiAPU,4910
+urllib3/util/proxy.py,sha256=FGipAEnvZteyldXNjce4DEB7YzwU-a5lep8y5S0qHQg,1604
+urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
+urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123
+urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
+urllib3/util/retry.py,sha256=s3ZNKXO6_t23ZQMg8zlu20PMSqraT495-S_mEY_19ak,21396
+urllib3/util/ssl_.py,sha256=fRqzRmoLfiDv5_tFzgFtGzlOuuHoLX0us7WUf7tYDAo,16269
+urllib3/util/ssltransport.py,sha256=IvGQvs9YWkf4jzfqVjTu_UWjwAUgPn5ActajW8VLz6A,6908
+urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003
+urllib3/util/url.py,sha256=LWfLSlI4l2FmUMKfCkElCaW10-0N-sJDT9bxaDZJkjs,13964
+urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404
diff --git a/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/WHEEL b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/WHEEL
new file mode 100644
index 00000000..9dff69d8
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.24.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/metadata.json b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/metadata.json
new file mode 100644
index 00000000..96650ff3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/metadata.json
@@ -0,0 +1 @@
+{"generator": "bdist_wheel (0.24.0)", "summary": "HTTP library with thread-safe connection pooling, file post, and more.", "metadata_version": "2.0", "name": "urllib3", "version": "1.26.4", "extensions": {"python.details": {"project_urls": {"Home": "https://urllib3.readthedocs.io/"}, "contacts": [{"email": "andrey.petrov@shazow.net", "name": "Andrey Petrov", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}}}, "license": "MIT", "project_url": "Documentation, https://urllib3.readthedocs.io/", "keywords": ["urllib", "httplib", "threadsafe", "filepost", "http", "https", "ssl", "pooling"], "classifiers": ["Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Internet :: WWW/HTTP", "Topic :: Software Development :: Libraries"], "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4", "description_content_type": "text/x-rst", "extras": ["brotli", "secure", "socks"], "run_requires": [{"requires": ["pyOpenSSL>=0.14", "cryptography>=1.3.4", "idna>=2.0.0", "certifi"], "extra": "secure"}, {"requires": ["ipaddress"], "extra": "secure", "environment": "python_version==\"2.7\""}, {"requires": ["PySocks>=1.5.6,<2.0,!=1.5.7"], "extra": "socks"}, {"requires": ["brotlipy>=0.6.0"], "extra": "brotli"}]}
\ No newline at end of file
diff --git a/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/top_level.txt
new file mode 100644
index 00000000..a42590be
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3-1.26.4.dist-info/top_level.txt
@@ -0,0 +1 @@
+urllib3
diff --git a/venv/lib/python3.7/site-packages/urllib3/__init__.py b/venv/lib/python3.7/site-packages/urllib3/__init__.py
new file mode 100644
index 00000000..fe86b59d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/__init__.py
@@ -0,0 +1,85 @@
+"""
+Python HTTP library with thread-safe connection pooling, file post support, a user-friendly API, and more
+"""
+from __future__ import absolute_import
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+import warnings
+from logging import NullHandler
+
+from . import exceptions
+from ._version import __version__
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
+from .filepost import encode_multipart_formdata
+from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+from .response import HTTPResponse
+from .util.request import make_headers
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import get_host
+
+__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
+__license__ = "MIT"
+__version__ = __version__
+
+__all__ = (
+    "HTTPConnectionPool",
+    "HTTPSConnectionPool",
+    "PoolManager",
+    "ProxyManager",
+    "HTTPResponse",
+    "Retry",
+    "Timeout",
+    "add_stderr_logger",
+    "connection_from_url",
+    "disable_warnings",
+    "encode_multipart_formdata",
+    "get_host",
+    "make_headers",
+    "proxy_from_url",
+)
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+
+def add_stderr_logger(level=logging.DEBUG):
+    """
+    Helper for quickly adding a StreamHandler to the logger. Useful for
+    debugging.
+
+    Returns the handler after adding it.
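+
+    Usage sketch::
+
+        import urllib3
+
+        handler = urllib3.add_stderr_logger()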
+    """
+    # This method needs to be in this __init__.py to get the __name__ correct
+    # even if urllib3 is vendored within another package.
+    logger = logging.getLogger(__name__)
+    handler = logging.StreamHandler()
+    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
+    logger.addHandler(handler)
+    logger.setLevel(level)
+    logger.debug("Added a stderr logging handler to logger: %s", __name__)
+    return handler
+
+
+# ... Clean up.
+del NullHandler
+
+
+# All warning filters *must* be appended unless you're really certain that they
+# shouldn't be: otherwise, it's very hard for users to use most Python
+# mechanisms to silence them.
+# SecurityWarning's always go off by default.
+warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
+# SubjectAltNameWarning's should go off once per host
+warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
+# InsecurePlatformWarning's don't vary between requests, so we keep it default.
+warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
+# SNIMissingWarnings should go off only once.
+warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
+
+
+def disable_warnings(category=exceptions.HTTPWarning):
+    """
+    Helper for quickly disabling all urllib3 warnings.
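+
+    Usage sketch::
+
+        import urllib3
+
+        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)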
+    """
+    warnings.simplefilter("ignore", category)
diff --git a/venv/lib/python3.7/site-packages/urllib3/_collections.py b/venv/lib/python3.7/site-packages/urllib3/_collections.py
new file mode 100644
index 00000000..da9857e9
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/_collections.py
@@ -0,0 +1,337 @@
+from __future__ import absolute_import
+
+try:
+    from collections.abc import Mapping, MutableMapping
+except ImportError:
+    from collections import Mapping, MutableMapping
+try:
+    from threading import RLock
+except ImportError:  # Platform-specific: No threads available
+
+    class RLock:
+        def __enter__(self):
+            pass
+
+        def __exit__(self, exc_type, exc_value, traceback):
+            pass
+
+
+from collections import OrderedDict
+
+from .exceptions import InvalidHeader
+from .packages import six
+from .packages.six import iterkeys, itervalues
+
+__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
+
+
+_Null = object()
+
+
+class RecentlyUsedContainer(MutableMapping):
+    """
+    Provides a thread-safe dict-like container which maintains up to
+    ``maxsize`` keys while throwing away the least-recently-used keys beyond
+    ``maxsize``.
+
+    :param maxsize:
+        Maximum number of recent elements to retain.
+
+    :param dispose_func:
+        Callback invoked every time an item is evicted from the container:
+        ``dispose_func(value)`` is called with the evicted value.
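+
+    Usage sketch::
+
+        def log_eviction(value):
+            print("evicted: %r" % value)
+
+        container = RecentlyUsedContainer(maxsize=2, dispose_func=log_eviction)
+        container["a"] = 1
+        container["b"] = 2
+        container["c"] = 3  # evicts "a"; log_eviction(1) is called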
+    """
+
+    ContainerCls = OrderedDict
+
+    def __init__(self, maxsize=10, dispose_func=None):
+        self._maxsize = maxsize
+        self.dispose_func = dispose_func
+
+        self._container = self.ContainerCls()
+        self.lock = RLock()
+
+    def __getitem__(self, key):
+        # Re-insert the item, moving it to the end of the eviction line.
+        with self.lock:
+            item = self._container.pop(key)
+            self._container[key] = item
+            return item
+
+    def __setitem__(self, key, value):
+        evicted_value = _Null
+        with self.lock:
+            # Possibly evict the existing value of 'key'
+            evicted_value = self._container.get(key, _Null)
+            self._container[key] = value
+
+            # If we didn't evict an existing value, we might have to evict the
+            # least recently used item from the beginning of the container.
+            if len(self._container) > self._maxsize:
+                _key, evicted_value = self._container.popitem(last=False)
+
+        if self.dispose_func and evicted_value is not _Null:
+            self.dispose_func(evicted_value)
+
+    def __delitem__(self, key):
+        with self.lock:
+            value = self._container.pop(key)
+
+        if self.dispose_func:
+            self.dispose_func(value)
+
+    def __len__(self):
+        with self.lock:
+            return len(self._container)
+
+    def __iter__(self):
+        raise NotImplementedError(
+            "Iteration over this class is unlikely to be threadsafe."
+        )
+
+    def clear(self):
+        with self.lock:
+            # Copy pointers to all values, then wipe the mapping
+            values = list(itervalues(self._container))
+            self._container.clear()
+
+        if self.dispose_func:
+            for value in values:
+                self.dispose_func(value)
+
+    def keys(self):
+        with self.lock:
+            return list(iterkeys(self._container))
+
+
+class HTTPHeaderDict(MutableMapping):
+    """
+    :param headers:
+        An iterable of field-value pairs. Must not contain multiple field names
+        when compared case-insensitively.
+
+    :param kwargs:
+        Additional field-value pairs to pass in to ``dict.update``.
+
+    A ``dict`` like container for storing HTTP Headers.
+
+    Field names are stored and compared case-insensitively in compliance with
+    RFC 7230. Iteration provides the first case-sensitive key seen for each
+    case-insensitive pair.
+
+    Using ``__setitem__`` syntax overwrites fields that compare equal
+    case-insensitively in order to maintain ``dict``'s api. For fields that
+    compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
+    in a loop.
+
+    If multiple fields that are equal case-insensitively are passed to the
+    constructor or ``.update``, the behavior is undefined and some will be
+    lost.
+
+    >>> headers = HTTPHeaderDict()
+    >>> headers.add('Set-Cookie', 'foo=bar')
+    >>> headers.add('set-cookie', 'baz=quxx')
+    >>> headers['content-length'] = '7'
+    >>> headers['SET-cookie']
+    'foo=bar, baz=quxx'
+    >>> headers['Content-Length']
+    '7'
+    """
+
+    def __init__(self, headers=None, **kwargs):
+        super(HTTPHeaderDict, self).__init__()
+        self._container = OrderedDict()
+        if headers is not None:
+            if isinstance(headers, HTTPHeaderDict):
+                self._copy_from(headers)
+            else:
+                self.extend(headers)
+        if kwargs:
+            self.extend(kwargs)
+
+    def __setitem__(self, key, val):
+        self._container[key.lower()] = [key, val]
+        return self._container[key.lower()]
+
+    def __getitem__(self, key):
+        val = self._container[key.lower()]
+        return ", ".join(val[1:])
+
+    def __delitem__(self, key):
+        del self._container[key.lower()]
+
+    def __contains__(self, key):
+        return key.lower() in self._container
+
+    def __eq__(self, other):
+        if not isinstance(other, Mapping) and not hasattr(other, "keys"):
+            return False
+        if not isinstance(other, type(self)):
+            other = type(self)(other)
+        return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
+            (k.lower(), v) for k, v in other.itermerged()
+        )
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    if six.PY2:  # Python 2
+        iterkeys = MutableMapping.iterkeys
+        itervalues = MutableMapping.itervalues
+
+    __marker = object()
+
+    def __len__(self):
+        return len(self._container)
+
+    def __iter__(self):
+        # Only provide the originally cased names
+        for vals in self._container.values():
+            yield vals[0]
+
+    def pop(self, key, default=__marker):
+        """D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+        If key is not found, d is returned if given, otherwise KeyError is raised.
+        """
+        # Using the MutableMapping function directly fails due to the private marker.
+        # Using ordinary dict.pop would expose the internal structures.
+        # So let's reinvent the wheel.
+        try:
+            value = self[key]
+        except KeyError:
+            if default is self.__marker:
+                raise
+            return default
+        else:
+            del self[key]
+            return value
+
+    def discard(self, key):
+        try:
+            del self[key]
+        except KeyError:
+            pass
+
+    def add(self, key, val):
+        """Adds a (name, value) pair, doesn't overwrite the value if it already
+        exists.
+
+        >>> headers = HTTPHeaderDict(foo='bar')
+        >>> headers.add('Foo', 'baz')
+        >>> headers['foo']
+        'bar, baz'
+        """
+        key_lower = key.lower()
+        new_vals = [key, val]
+        # Keep the common case aka no item present as fast as possible
+        vals = self._container.setdefault(key_lower, new_vals)
+        if new_vals is not vals:
+            vals.append(val)
+
+    def extend(self, *args, **kwargs):
+        """Generic import function for any type of header-like object.
+        Adapted version of MutableMapping.update in order to insert items
+        with self.add instead of self.__setitem__
+        """
+        if len(args) > 1:
+            raise TypeError(
+                "extend() takes at most 1 positional "
+                "argument ({0} given)".format(len(args))
+            )
+        other = args[0] if len(args) >= 1 else ()
+
+        if isinstance(other, HTTPHeaderDict):
+            for key, val in other.iteritems():
+                self.add(key, val)
+        elif isinstance(other, Mapping):
+            for key in other:
+                self.add(key, other[key])
+        elif hasattr(other, "keys"):
+            for key in other.keys():
+                self.add(key, other[key])
+        else:
+            for key, value in other:
+                self.add(key, value)
+
+        for key, value in kwargs.items():
+            self.add(key, value)
+
+    def getlist(self, key, default=__marker):
+        """Returns a list of all the values for the named field. Returns an
+        empty list if the key doesn't exist."""
+        try:
+            vals = self._container[key.lower()]
+        except KeyError:
+            if default is self.__marker:
+                return []
+            return default
+        else:
+            return vals[1:]
+
+    # Backwards compatibility for httplib
+    getheaders = getlist
+    getallmatchingheaders = getlist
+    iget = getlist
+
+    # Backwards compatibility for http.cookiejar
+    get_all = getlist
+
+    def __repr__(self):
+        return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
+
+    def _copy_from(self, other):
+        for key in other:
+            val = other.getlist(key)
+            if isinstance(val, list):
+                # Don't need to convert tuples
+                val = list(val)
+            self._container[key.lower()] = [key] + val
+
+    def copy(self):
+        clone = type(self)()
+        clone._copy_from(self)
+        return clone
+
+    def iteritems(self):
+        """Iterate over all header lines, including duplicate ones."""
+        for key in self:
+            vals = self._container[key.lower()]
+            for val in vals[1:]:
+                yield vals[0], val
+
+    def itermerged(self):
+        """Iterate over all headers, merging duplicate ones together."""
+        for key in self:
+            val = self._container[key.lower()]
+            yield val[0], ", ".join(val[1:])
+
+    def items(self):
+        return list(self.iteritems())
+
+    @classmethod
+    def from_httplib(cls, message):  # Python 2
+        """Read headers from a Python 2 httplib message object."""
+        # python2.7 does not expose a proper API for exporting multiheaders
+        # efficiently. This function re-reads raw lines from the message
+        # object and extracts the multiheaders properly.
+        obs_fold_continued_leaders = (" ", "\t")
+        headers = []
+
+        for line in message.headers:
+            if line.startswith(obs_fold_continued_leaders):
+                if not headers:
+                    # We received a header line that starts with OWS as described
+                    # in RFC-7230 S3.2.4. This indicates a multiline header, but
+                    # there exists no previous header to which we can attach it.
+                    raise InvalidHeader(
+                        "Header continuation with no previous header: %s" % line
+                    )
+                else:
+                    key, value = headers[-1]
+                    headers[-1] = (key, value + " " + line.strip())
+                    continue
+
+            key, value = line.split(":", 1)
+            headers.append((key, value.strip()))
+
+        return cls(headers)
diff --git a/venv/lib/python3.7/site-packages/urllib3/_version.py b/venv/lib/python3.7/site-packages/urllib3/_version.py
new file mode 100644
index 00000000..97c98330
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/_version.py
@@ -0,0 +1,2 @@
+# This file is protected via CODEOWNERS
+__version__ = "1.26.4"
diff --git a/venv/lib/python3.7/site-packages/urllib3/connection.py b/venv/lib/python3.7/site-packages/urllib3/connection.py
new file mode 100644
index 00000000..45580b7e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/connection.py
@@ -0,0 +1,539 @@
+from __future__ import absolute_import
+
+import datetime
+import logging
+import os
+import re
+import socket
+import warnings
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+from .packages import six
+from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
+from .packages.six.moves.http_client import HTTPException  # noqa: F401
+from .util.proxy import create_proxy_ssl_context
+
+try:  # Compiled with SSL?
+    import ssl
+
+    BaseSSLError = ssl.SSLError
+except (ImportError, AttributeError):  # Platform-specific: No SSL.
+    ssl = None
+
+    class BaseSSLError(BaseException):
+        pass
+
+
+try:
+    # Python 3: not a no-op, we're adding this to the namespace so it can be imported.
+    ConnectionError = ConnectionError
+except NameError:
+    # Python 2
+    class ConnectionError(Exception):
+        pass
+
+
+try:  # Python 3:
+    # Not a no-op, we're adding this to the namespace so it can be imported.
+    BrokenPipeError = BrokenPipeError
+except NameError:  # Python 2:
+
+    class BrokenPipeError(Exception):
+        pass
+
+
+from ._collections import HTTPHeaderDict  # noqa (historical, removed in v2)
+from ._version import __version__
+from .exceptions import (
+    ConnectTimeoutError,
+    NewConnectionError,
+    SubjectAltNameWarning,
+    SystemTimeWarning,
+)
+from .packages.ssl_match_hostname import CertificateError, match_hostname
+from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
+from .util.ssl_ import (
+    assert_fingerprint,
+    create_urllib3_context,
+    resolve_cert_reqs,
+    resolve_ssl_version,
+    ssl_wrap_socket,
+)
+
+log = logging.getLogger(__name__)
+
+port_by_scheme = {"http": 80, "https": 443}
+
+# When it comes time to update this value as a part of regular maintenance
+# (ie test_recent_date is failing) update it to ~6 months before the current date.
+RECENT_DATE = datetime.date(2020, 7, 1)
+
+_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+
+
+class HTTPConnection(_HTTPConnection, object):
+    """
+    Based on :class:`http.client.HTTPConnection` but provides an extra constructor
+    backwards-compatibility layer between older and newer Pythons.
+
+    Additional keyword parameters are used to configure attributes of the connection.
+    Accepted parameters include:
+
+    - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
+    - ``source_address``: Set the source address for the current connection.
+    - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+      defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+      Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
+
+      For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+      you might pass:
+
+      .. code-block:: python
+
+         HTTPConnection.default_socket_options + [
+             (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+         ]
+
+      Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+    """
+
+    default_port = port_by_scheme["http"]
+
+    #: Disable Nagle's algorithm by default.
+    #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+    default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
+
+    #: Whether this connection verifies the host's certificate.
+    is_verified = False
+
+    def __init__(self, *args, **kw):
+        if not six.PY2:
+            kw.pop("strict", None)
+
+        # Pre-set source_address.
+        self.source_address = kw.get("source_address")
+
+        #: The socket options provided by the user. If no options are
+        #: provided, we use the default options.
+        self.socket_options = kw.pop("socket_options", self.default_socket_options)
+
+        # Proxy options provided by the user.
+        self.proxy = kw.pop("proxy", None)
+        self.proxy_config = kw.pop("proxy_config", None)
+
+        _HTTPConnection.__init__(self, *args, **kw)
+
+    @property
+    def host(self):
+        """
+        Getter method to remove any trailing dots that indicate the hostname is an FQDN.
+
+        In general, SSL certificates don't include the trailing dot indicating a
+        fully-qualified domain name, and thus, they don't validate properly when
+        checked against a domain name that includes the dot. In addition, some
+        servers may not expect to receive the trailing dot when provided.
+
+        However, the hostname with trailing dot is critical to DNS resolution; doing a
+        lookup with the trailing dot will properly only resolve the appropriate FQDN,
+        whereas a lookup without a trailing dot will search the system's search domain
+        list. Thus, it's important to keep the original host around for use only in
+        those cases where it's appropriate (i.e., when doing DNS lookup to establish the
+        actual TCP connection across which we're going to send HTTP requests).
+        """
+        return self._dns_host.rstrip(".")
+
+    @host.setter
+    def host(self, value):
+        """
+        Setter for the `host` property.
+
+        We assume that only urllib3 uses the _dns_host attribute; httplib itself
+        only uses `host`, and it seems reasonable that other libraries follow suit.
+        """
+        self._dns_host = value
+
+    def _new_conn(self):
+        """Establish a socket connection and set nodelay settings on it.
+
+        :return: New socket connection.
+        """
+        extra_kw = {}
+        if self.source_address:
+            extra_kw["source_address"] = self.source_address
+
+        if self.socket_options:
+            extra_kw["socket_options"] = self.socket_options
+
+        try:
+            conn = connection.create_connection(
+                (self._dns_host, self.port), self.timeout, **extra_kw
+            )
+
+        except SocketTimeout:
+            raise ConnectTimeoutError(
+                self,
+                "Connection to %s timed out. (connect timeout=%s)"
+                % (self.host, self.timeout),
+            )
+
+        except SocketError as e:
+            raise NewConnectionError(
+                self, "Failed to establish a new connection: %s" % e
+            )
+
+        return conn
+
+    def _is_using_tunnel(self):
+        # Google App Engine's httplib does not define _tunnel_host
+        return getattr(self, "_tunnel_host", None)
+
+    def _prepare_conn(self, conn):
+        self.sock = conn
+        if self._is_using_tunnel():
+            # TODO: Fix tunnel so it doesn't depend on self.sock state.
+            self._tunnel()
+            # Mark this connection as not reusable
+            self.auto_open = 0
+
+    def connect(self):
+        conn = self._new_conn()
+        self._prepare_conn(conn)
+
+    def putrequest(self, method, url, *args, **kwargs):
+        """"""
+        # Empty docstring because the indentation of CPython's implementation
+        # is broken but we don't want this method in our documentation.
+        match = _CONTAINS_CONTROL_CHAR_RE.search(method)
+        if match:
+            raise ValueError(
+                "Method cannot contain non-token characters %r (found at least %r)"
+                % (method, match.group())
+            )
+
+        return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
+
+    def putheader(self, header, *values):
+        """"""
+        if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
+            _HTTPConnection.putheader(self, header, *values)
+        elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
+            raise ValueError(
+                "urllib3.util.SKIP_HEADER only supports '%s'"
+                % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
+            )
+
+    def request(self, method, url, body=None, headers=None):
+        if headers is None:
+            headers = {}
+        else:
+            # Avoid modifying the headers passed into .request()
+            headers = headers.copy()
+        if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
+            headers["User-Agent"] = _get_default_user_agent()
+        super(HTTPConnection, self).request(method, url, body=body, headers=headers)
+
+    def request_chunked(self, method, url, body=None, headers=None):
+        """
+        Alternative to the common request method, which sends the
+        body with chunked encoding and not as one block.
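+
+        Usage sketch (host, path, and the generator are placeholders)::
+
+            def gen():
+                yield b"first chunk"
+                yield b"second chunk"
+
+            conn = HTTPConnection("example.com")
+            conn.request_chunked("POST", "/upload", body=gen())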
+        """
+        headers = headers or {}
+        header_keys = set([six.ensure_str(k.lower()) for k in headers])
+        skip_accept_encoding = "accept-encoding" in header_keys
+        skip_host = "host" in header_keys
+        self.putrequest(
+            method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
+        )
+        if "user-agent" not in header_keys:
+            self.putheader("User-Agent", _get_default_user_agent())
+        for header, value in headers.items():
+            self.putheader(header, value)
+        if "transfer-encoding" not in headers:
+            self.putheader("Transfer-Encoding", "chunked")
+        self.endheaders()
+
+        if body is not None:
+            stringish_types = six.string_types + (bytes,)
+            if isinstance(body, stringish_types):
+                body = (body,)
+            for chunk in body:
+                if not chunk:
+                    continue
+                if not isinstance(chunk, bytes):
+                    chunk = chunk.encode("utf8")
+                len_str = hex(len(chunk))[2:]
+                to_send = bytearray(len_str.encode())
+                to_send += b"\r\n"
+                to_send += chunk
+                to_send += b"\r\n"
+                self.send(to_send)
+
+        # After the if clause, to always have a closed body
+        self.send(b"0\r\n\r\n")
+
+
+class HTTPSConnection(HTTPConnection):
+    """
+    Many of the parameters to this constructor are passed to the underlying SSL
+    socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
+    """
+
+    default_port = port_by_scheme["https"]
+
+    cert_reqs = None
+    ca_certs = None
+    ca_cert_dir = None
+    ca_cert_data = None
+    ssl_version = None
+    assert_fingerprint = None
+    tls_in_tls_required = False
+
+    def __init__(
+        self,
+        host,
+        port=None,
+        key_file=None,
+        cert_file=None,
+        key_password=None,
+        strict=None,
+        timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+        ssl_context=None,
+        server_hostname=None,
+        **kw
+    ):
+
+        HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw)
+
+        self.key_file = key_file
+        self.cert_file = cert_file
+        self.key_password = key_password
+        self.ssl_context = ssl_context
+        self.server_hostname = server_hostname
+
+        # Required property for Google AppEngine 1.9.0 which otherwise causes
+        # HTTPS requests to go out as HTTP. (See Issue #356)
+        self._protocol = "https"
+
+    def set_cert(
+        self,
+        key_file=None,
+        cert_file=None,
+        cert_reqs=None,
+        key_password=None,
+        ca_certs=None,
+        assert_hostname=None,
+        assert_fingerprint=None,
+        ca_cert_dir=None,
+        ca_cert_data=None,
+    ):
+        """
+        This method should only be called once, before the connection is used.
+        """
+        # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
+        # have an SSLContext object in which case we'll use its verify_mode.
+        if cert_reqs is None:
+            if self.ssl_context is not None:
+                cert_reqs = self.ssl_context.verify_mode
+            else:
+                cert_reqs = resolve_cert_reqs(None)
+
+        self.key_file = key_file
+        self.cert_file = cert_file
+        self.cert_reqs = cert_reqs
+        self.key_password = key_password
+        self.assert_hostname = assert_hostname
+        self.assert_fingerprint = assert_fingerprint
+        self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+        self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+        self.ca_cert_data = ca_cert_data
+
+    def connect(self):
+        # Add certificate verification
+        conn = self._new_conn()
+        hostname = self.host
+        tls_in_tls = False
+
+        if self._is_using_tunnel():
+            if self.tls_in_tls_required:
+                conn = self._connect_tls_proxy(hostname, conn)
+                tls_in_tls = True
+
+            self.sock = conn
+
+            # Calls self._set_hostport(), so self.host is
+            # self._tunnel_host below.
+            self._tunnel()
+            # Mark this connection as not reusable
+            self.auto_open = 0
+
+            # Override the host with the one we're requesting data from.
+            hostname = self._tunnel_host
+
+        server_hostname = hostname
+        if self.server_hostname is not None:
+            server_hostname = self.server_hostname
+
+        is_time_off = datetime.date.today() < RECENT_DATE
+        if is_time_off:
+            warnings.warn(
+                (
+                    "System time is way off (before {0}). This will probably "
+                    "lead to SSL verification errors"
+                ).format(RECENT_DATE),
+                SystemTimeWarning,
+            )
+
+        # Wrap socket using verification with the root certs in
+        # trusted_root_certs
+        default_ssl_context = False
+        if self.ssl_context is None:
+            default_ssl_context = True
+            self.ssl_context = create_urllib3_context(
+                ssl_version=resolve_ssl_version(self.ssl_version),
+                cert_reqs=resolve_cert_reqs(self.cert_reqs),
+            )
+
+        context = self.ssl_context
+        context.verify_mode = resolve_cert_reqs(self.cert_reqs)
+
+        # Try to load OS default certs if none are given.
+        # Works well on Windows (requires Python3.4+)
+        if (
+            not self.ca_certs
+            and not self.ca_cert_dir
+            and not self.ca_cert_data
+            and default_ssl_context
+            and hasattr(context, "load_default_certs")
+        ):
+            context.load_default_certs()
+
+        self.sock = ssl_wrap_socket(
+            sock=conn,
+            keyfile=self.key_file,
+            certfile=self.cert_file,
+            key_password=self.key_password,
+            ca_certs=self.ca_certs,
+            ca_cert_dir=self.ca_cert_dir,
+            ca_cert_data=self.ca_cert_data,
+            server_hostname=server_hostname,
+            ssl_context=context,
+            tls_in_tls=tls_in_tls,
+        )
+
+        # If we're using all defaults and the connection
+        # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
+        # for the host.
+        if (
+            default_ssl_context
+            and self.ssl_version is None
+            and hasattr(self.sock, "version")
+            and self.sock.version() in {"TLSv1", "TLSv1.1"}
+        ):
+            warnings.warn(
+                "Negotiating TLSv1/TLSv1.1 by default is deprecated "
+                "and will be disabled in urllib3 v2.0.0. Connecting to "
+                "'%s' with '%s' can be enabled by explicitly opting-in "
+                "with 'ssl_version'" % (self.host, self.sock.version()),
+                DeprecationWarning,
+            )
+
+        if self.assert_fingerprint:
+            assert_fingerprint(
+                self.sock.getpeercert(binary_form=True), self.assert_fingerprint
+            )
+        elif (
+            context.verify_mode != ssl.CERT_NONE
+            and not getattr(context, "check_hostname", False)
+            and self.assert_hostname is not False
+        ):
+            # While urllib3 attempts to always turn off hostname matching from
+            # the TLS library, this cannot always be done. So we check whether
+            # the TLS Library still thinks it's matching hostnames.
+            cert = self.sock.getpeercert()
+            if not cert.get("subjectAltName", ()):
+                warnings.warn(
+                    (
+                        "Certificate for {0} has no `subjectAltName`, falling back to check for a "
+                        "`commonName` for now. This feature is being removed by major browsers and "
+                        "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
+                        "for details.)".format(hostname)
+                    ),
+                    SubjectAltNameWarning,
+                )
+            _match_hostname(cert, self.assert_hostname or server_hostname)
+
+        self.is_verified = (
+            context.verify_mode == ssl.CERT_REQUIRED
+            or self.assert_fingerprint is not None
+        )
+
+    def _connect_tls_proxy(self, hostname, conn):
+        """
+        Establish a TLS connection to the proxy using the provided SSL context.
+        """
+        proxy_config = self.proxy_config
+        ssl_context = proxy_config.ssl_context
+        if ssl_context:
+            # If the user provided a proxy context, we assume CA and client
+            # certificates have already been set
+            return ssl_wrap_socket(
+                sock=conn,
+                server_hostname=hostname,
+                ssl_context=ssl_context,
+            )
+
+        ssl_context = create_proxy_ssl_context(
+            self.ssl_version,
+            self.cert_reqs,
+            self.ca_certs,
+            self.ca_cert_dir,
+            self.ca_cert_data,
+        )
+        # By default urllib3's SSLContext disables `check_hostname` and uses
+        # a custom check. For proxies we're good with relying on the default
+        # verification.
+        ssl_context.check_hostname = True
+
+        # If no cert was provided, use only the default options for server
+        # certificate validation
+        return ssl_wrap_socket(
+            sock=conn,
+            ca_certs=self.ca_certs,
+            ca_cert_dir=self.ca_cert_dir,
+            ca_cert_data=self.ca_cert_data,
+            server_hostname=hostname,
+            ssl_context=ssl_context,
+        )
+
+
+def _match_hostname(cert, asserted_hostname):
+    try:
+        match_hostname(cert, asserted_hostname)
+    except CertificateError as e:
+        log.warning(
+            "Certificate did not match expected hostname: %s. Certificate: %s",
+            asserted_hostname,
+            cert,
+        )
+        # Add cert to exception and reraise so client code can inspect
+        # the cert when catching the exception, if they want to
+        e._peer_cert = cert
+        raise
+
+
+def _get_default_user_agent():
+    return "python-urllib3/%s" % __version__
+
+
+class DummyConnection(object):
+    """Used to detect a failed ConnectionCls import."""
+
+    pass
+
+
+if not ssl:
+    HTTPSConnection = DummyConnection  # noqa: F811
+
+
+VerifiedHTTPSConnection = HTTPSConnection
diff --git a/venv/lib/python3.7/site-packages/urllib3/connectionpool.py b/venv/lib/python3.7/site-packages/urllib3/connectionpool.py
new file mode 100644
index 00000000..4708c5bf
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/connectionpool.py
@@ -0,0 +1,1067 @@
+from __future__ import absolute_import
+
+import errno
+import logging
+import socket
+import sys
+import warnings
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+from .connection import (
+    BaseSSLError,
+    BrokenPipeError,
+    DummyConnection,
+    HTTPConnection,
+    HTTPException,
+    HTTPSConnection,
+    VerifiedHTTPSConnection,
+    port_by_scheme,
+)
+from .exceptions import (
+    ClosedPoolError,
+    EmptyPoolError,
+    HeaderParsingError,
+    HostChangedError,
+    InsecureRequestWarning,
+    LocationValueError,
+    MaxRetryError,
+    NewConnectionError,
+    ProtocolError,
+    ProxyError,
+    ReadTimeoutError,
+    SSLError,
+    TimeoutError,
+)
+from .packages import six
+from .packages.six.moves import queue
+from .packages.ssl_match_hostname import CertificateError
+from .request import RequestMethods
+from .response import HTTPResponse
+from .util.connection import is_connection_dropped
+from .util.proxy import connection_requires_http_tunnel
+from .util.queue import LifoQueue
+from .util.request import set_file_position
+from .util.response import assert_header_parsing
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import Url, _encode_target
+from .util.url import _normalize_host as normalize_host
+from .util.url import get_host, parse_url
+
+xrange = six.moves.xrange
+
+log = logging.getLogger(__name__)
+
+_Default = object()
+
+
+# Pool objects
+class ConnectionPool(object):
+    """
+    Base class for all connection pools, such as
+    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
+
+    .. note::
+       ConnectionPool.urlopen() does not normalize or percent-encode target
+       URIs, which is useful if your target server doesn't support
+       percent-encoded target URIs.
+    """
+
+    scheme = None
+    QueueCls = LifoQueue
+
+    def __init__(self, host, port=None):
+        if not host:
+            raise LocationValueError("No host specified.")
+
+        self.host = _normalize_host(host, scheme=self.scheme)
+        self._proxy_host = host.lower()
+        self.port = port
+
+    def __str__(self):
+        return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+        # Return False to re-raise any potential exceptions
+        return False
+
+    def close(self):
+        """
+        Close all pooled connections and disable the pool.
+        """
+        pass
+
+
+# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
+_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
+
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+    """
+    Thread-safe connection pool for one host.
+
+    :param host:
+        Host used for this HTTP Connection (e.g. "localhost"), passed into
+        :class:`http.client.HTTPConnection`.
+
+    :param port:
+        Port used for this HTTP Connection (None is equivalent to 80), passed
+        into :class:`http.client.HTTPConnection`.
+
+    :param strict:
+        Causes BadStatusLine to be raised if the status line can't be parsed
+        as a valid HTTP/1.0 or 1.1 status line, passed into
+        :class:`http.client.HTTPConnection`.
+
+        .. note::
+           Only works in Python 2. This parameter is ignored in Python 3.
+
+    :param timeout:
+        Socket timeout in seconds for each individual connection. This can
+        be a float or integer, which sets the timeout for the HTTP request,
+        or an instance of :class:`urllib3.util.Timeout`, which gives you more
+        fine-grained control over request timeouts. After the constructor has
+        run, this is always a :class:`urllib3.util.Timeout` object.
+
+    :param maxsize:
+        Number of connections to save that can be reused. More than 1 is useful
+        in multithreaded situations. If ``block`` is set to False, more
+        connections will be created but they will not be saved once they've
+        been used.
+
+    :param block:
+        If set to True, no more than ``maxsize`` connections will be used at
+        a time. When no free connections are available, the call will block
+        until a connection has been released. This is a useful side effect for
+        particular multithreaded situations where one does not want to use
+        more than ``maxsize`` connections per host, e.g. to avoid flooding it.
+
+    :param headers:
+        Headers to include with all requests, unless other headers are given
+        explicitly.
+
+    :param retries:
+        Retry configuration to use by default with requests in this pool.
+
+    :param _proxy:
+        Parsed proxy URL, should not be used directly, instead, see
+        :class:`urllib3.ProxyManager`
+
+    :param _proxy_headers:
+        A dictionary with proxy headers, should not be used directly,
+        instead, see :class:`urllib3.ProxyManager`
+
+    :param \\**conn_kw:
+        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
+        :class:`urllib3.connection.HTTPSConnection` instances.
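+
+    A minimal usage sketch (the host and path are illustrative placeholders)::
+
+        >>> pool = HTTPConnectionPool("httpbin.org", maxsize=2, block=True)
+        >>> r = pool.request("GET", "/get")
+        >>> r.status
+        200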
+    """
+
+    scheme = "http"
+    ConnectionCls = HTTPConnection
+    ResponseCls = HTTPResponse
+
+    def __init__(
+        self,
+        host,
+        port=None,
+        strict=False,
+        timeout=Timeout.DEFAULT_TIMEOUT,
+        maxsize=1,
+        block=False,
+        headers=None,
+        retries=None,
+        _proxy=None,
+        _proxy_headers=None,
+        _proxy_config=None,
+        **conn_kw
+    ):
+        ConnectionPool.__init__(self, host, port)
+        RequestMethods.__init__(self, headers)
+
+        self.strict = strict
+
+        if not isinstance(timeout, Timeout):
+            timeout = Timeout.from_float(timeout)
+
+        if retries is None:
+            retries = Retry.DEFAULT
+
+        self.timeout = timeout
+        self.retries = retries
+
+        self.pool = self.QueueCls(maxsize)
+        self.block = block
+
+        self.proxy = _proxy
+        self.proxy_headers = _proxy_headers or {}
+        self.proxy_config = _proxy_config
+
+        # Fill the queue up so that doing get() on it will block properly
+        for _ in xrange(maxsize):
+            self.pool.put(None)
+
+        # These are mostly for testing and debugging purposes.
+        self.num_connections = 0
+        self.num_requests = 0
+        self.conn_kw = conn_kw
+
+        if self.proxy:
+            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
+            # We cannot know if the user has added default socket options, so we cannot replace the
+            # list.
+            self.conn_kw.setdefault("socket_options", [])
+
+            self.conn_kw["proxy"] = self.proxy
+            self.conn_kw["proxy_config"] = self.proxy_config
+
+    def _new_conn(self):
+        """
+        Return a fresh :class:`HTTPConnection`.
+        """
+        self.num_connections += 1
+        log.debug(
+            "Starting new HTTP connection (%d): %s:%s",
+            self.num_connections,
+            self.host,
+            self.port or "80",
+        )
+
+        conn = self.ConnectionCls(
+            host=self.host,
+            port=self.port,
+            timeout=self.timeout.connect_timeout,
+            strict=self.strict,
+            **self.conn_kw
+        )
+        return conn
+
+    def _get_conn(self, timeout=None):
+        """
+        Get a connection. Will return a pooled connection if one is available.
+
+        If no connections are available and :attr:`.block` is ``False``, then a
+        fresh connection is returned.
+
+        :param timeout:
+            Seconds to wait before giving up and raising
+            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
+            :attr:`.block` is ``True``.
+        """
+        conn = None
+        try:
+            conn = self.pool.get(block=self.block, timeout=timeout)
+
+        except AttributeError:  # self.pool is None
+            raise ClosedPoolError(self, "Pool is closed.")
+
+        except queue.Empty:
+            if self.block:
+                raise EmptyPoolError(
+                    self,
+                    "Pool reached maximum size and no more connections are allowed.",
+                )
+            pass  # Oh well, we'll create a new connection then
+
+        # If this is a persistent connection, check if it got disconnected
+        if conn and is_connection_dropped(conn):
+            log.debug("Resetting dropped connection: %s", self.host)
+            conn.close()
+            if getattr(conn, "auto_open", 1) == 0:
+                # This is a proxied connection that has been mutated by
+                # http.client._tunnel() and cannot be reused (since it would
+                # attempt to bypass the proxy)
+                conn = None
+
+        return conn or self._new_conn()
+
+    def _put_conn(self, conn):
+        """
+        Put a connection back into the pool.
+
+        :param conn:
+            Connection object for the current host and port as returned by
+            :meth:`._new_conn` or :meth:`._get_conn`.
+
+        If the pool is already full, the connection is closed and discarded
+        because we exceeded maxsize. If connections are discarded frequently,
+        then maxsize should be increased.
+
+        If the pool is closed, then the connection will be closed and discarded.
+        """
+        try:
+            self.pool.put(conn, block=False)
+            return  # Everything is dandy, done.
+        except AttributeError:
+            # self.pool is None.
+            pass
+        except queue.Full:
+            # This should never happen if self.block == True
+            log.warning("Connection pool is full, discarding connection: %s", self.host)
+
+        # Connection never got put back into the pool, close it.
+        if conn:
+            conn.close()
+
+    def _validate_conn(self, conn):
+        """
+        Called right before a request is made, after the socket is created.
+        """
+        pass
+
+    def _prepare_proxy(self, conn):
+        # Nothing to do for HTTP connections.
+        pass
+
+    def _get_timeout(self, timeout):
+        """ Helper that always returns a :class:`urllib3.util.Timeout` """
+        if timeout is _Default:
+            return self.timeout.clone()
+
+        if isinstance(timeout, Timeout):
+            return timeout.clone()
+        else:
+            # User passed us an int/float. This is for backwards compatibility,
+            # can be removed later
+            return Timeout.from_float(timeout)
+
+    def _raise_timeout(self, err, url, timeout_value):
+        """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
+
+        if isinstance(err, SocketTimeout):
+            raise ReadTimeoutError(
+                self, url, "Read timed out. (read timeout=%s)" % timeout_value
+            )
+
+        # See the comment in _make_request about EAGAIN in Python 3. In
+        # Python 2 we have to specifically catch it and throw the timeout
+        # error ourselves.
+        if hasattr(err, "errno") and err.errno in _blocking_errnos:
+            raise ReadTimeoutError(
+                self, url, "Read timed out. (read timeout=%s)" % timeout_value
+            )
+
+        # Catch possible read timeouts thrown as SSL errors. If not the
+        # case, rethrow the original. We need to do this because of:
+        # http://bugs.python.org/issue10272
+        if "timed out" in str(err) or "did not complete (read)" in str(
+            err
+        ):  # Python < 2.7.4
+            raise ReadTimeoutError(
+                self, url, "Read timed out. (read timeout=%s)" % timeout_value
+            )
+
+    def _make_request(
+        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
+    ):
+        """
+        Perform a request on a given urllib connection object taken from our
+        pool.
+
+        :param conn:
+            a connection from one of our connection pools
+
+        :param timeout:
+            Socket timeout in seconds for the request. This can be a
+            float or integer, which will set the same timeout value for
+            the socket connect and the socket read, or an instance of
+            :class:`urllib3.util.Timeout`, which gives you more fine-grained
+            control over your timeouts.
+        """
+        self.num_requests += 1
+
+        timeout_obj = self._get_timeout(timeout)
+        timeout_obj.start_connect()
+        conn.timeout = timeout_obj.connect_timeout
+
+        # Trigger any extra validation we need to do.
+        try:
+            self._validate_conn(conn)
+        except (SocketTimeout, BaseSSLError) as e:
+            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
+            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
+            raise
+
+        # conn.request() calls http.client.*.request, not the method in
+        # urllib3.request. It also calls makefile (recv) on the socket.
+        try:
+            if chunked:
+                conn.request_chunked(method, url, **httplib_request_kw)
+            else:
+                conn.request(method, url, **httplib_request_kw)
+
+        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
+        # legitimately able to close the connection after sending a valid response.
+        # With this behaviour, the received response is still readable.
+        except BrokenPipeError:
+            # Python 3
+            pass
+        except IOError as e:
+            # Python 2 and macOS/Linux
+            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
+            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
+            if e.errno not in {
+                errno.EPIPE,
+                errno.ESHUTDOWN,
+                errno.EPROTOTYPE,
+            }:
+                raise
+
+        # Reset the timeout for the recv() on the socket
+        read_timeout = timeout_obj.read_timeout
+
+        # App Engine doesn't have a sock attr
+        if getattr(conn, "sock", None):
+            # In Python 3 socket.py will catch EAGAIN and return None when you
+            # try and read into the file pointer created by http.client, which
+            # instead raises a BadStatusLine exception. Instead of catching
+            # the exception and assuming all BadStatusLine exceptions are read
+            # timeouts, check for a zero timeout before making the request.
+            if read_timeout == 0:
+                raise ReadTimeoutError(
+                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
+                )
+            if read_timeout is Timeout.DEFAULT_TIMEOUT:
+                conn.sock.settimeout(socket.getdefaulttimeout())
+            else:  # None or a value
+                conn.sock.settimeout(read_timeout)
+
+        # Receive the response from the server
+        try:
+            try:
+                # Python 2.7, use buffering of HTTP responses
+                httplib_response = conn.getresponse(buffering=True)
+            except TypeError:
+                # Python 3
+                try:
+                    httplib_response = conn.getresponse()
+                except BaseException as e:
+                    # Remove the TypeError from the exception chain in
+                    # Python 3 (including for exceptions like SystemExit).
+                    # Otherwise it looks like a bug in the code.
+                    six.raise_from(e, None)
+        except (SocketTimeout, BaseSSLError, SocketError) as e:
+            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
+            raise
+
+        # AppEngine doesn't have a version attr.
+        http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
+        log.debug(
+            '%s://%s:%s "%s %s %s" %s %s',
+            self.scheme,
+            self.host,
+            self.port,
+            method,
+            url,
+            http_version,
+            httplib_response.status,
+            httplib_response.length,
+        )
+
+        try:
+            assert_header_parsing(httplib_response.msg)
+        except (HeaderParsingError, TypeError) as hpe:  # Platform-specific: Python 3
+            log.warning(
+                "Failed to parse headers (url=%s): %s",
+                self._absolute_url(url),
+                hpe,
+                exc_info=True,
+            )
+
+        return httplib_response
+
+    def _absolute_url(self, path):
+        return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
+
+    def close(self):
+        """
+        Close all pooled connections and disable the pool.
+        """
+        if self.pool is None:
+            return
+        # Disable access to the pool
+        old_pool, self.pool = self.pool, None
+
+        try:
+            while True:
+                conn = old_pool.get(block=False)
+                if conn:
+                    conn.close()
+
+        except queue.Empty:
+            pass  # Done.
+
+    def is_same_host(self, url):
+        """
+        Check if the given ``url`` is a member of the same host as this
+        connection pool.
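+
+        For example, a path-only URL always matches (illustrative doctest)::
+
+            >>> HTTPConnectionPool("example.com").is_same_host("/index.html")
+            True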
+        """
+        if url.startswith("/"):
+            return True
+
+        # TODO: Add optional support for socket.gethostbyname checking.
+        scheme, host, port = get_host(url)
+        if host is not None:
+            host = _normalize_host(host, scheme=scheme)
+
+        # Use explicit default port for comparison when none is given
+        if self.port and not port:
+            port = port_by_scheme.get(scheme)
+        elif not self.port and port == port_by_scheme.get(scheme):
+            port = None
+
+        return (scheme, host, port) == (self.scheme, self.host, self.port)
+
+    def urlopen(
+        self,
+        method,
+        url,
+        body=None,
+        headers=None,
+        retries=None,
+        redirect=True,
+        assert_same_host=True,
+        timeout=_Default,
+        pool_timeout=None,
+        release_conn=None,
+        chunked=False,
+        body_pos=None,
+        **response_kw
+    ):
+        """
+        Get a connection from the pool and perform an HTTP request. This is the
+        lowest level call for making a request, so you'll need to specify all
+        the raw details.
+
+        .. note::
+
+           More commonly, it's appropriate to use a convenience method provided
+           by :class:`.RequestMethods`, such as :meth:`request`.
+
+        .. note::
+
+           ``release_conn`` will only behave as expected if
+           ``preload_content=False`` because we want to make
+           ``preload_content=False`` the default behaviour someday soon without
+           breaking backwards compatibility.
+
+        :param method:
+            HTTP request method (such as GET, POST, PUT, etc.)
+
+        :param url:
+            The URL to perform the request on.
+
+        :param body:
+            Data to send in the request body, either :class:`str`, :class:`bytes`,
+            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
+
+        :param headers:
+            Dictionary of custom headers to send, such as User-Agent,
+            If-None-Match, etc. If None, pool headers are used. If provided,
+            these headers completely replace any pool-specific headers.
+
+        :param retries:
+            Configure the number of retries to allow before raising a
+            :class:`~urllib3.exceptions.MaxRetryError` exception.
+
+            Pass ``None`` to retry until you receive a response. Pass a
+            :class:`~urllib3.util.retry.Retry` object for fine-grained control
+            over different types of retries.
+            Pass an integer number to retry connection errors that many times,
+            but no other types of errors. Pass zero to never retry.
+
+            If ``False``, then retries are disabled and any exception is raised
+            immediately. Also, instead of raising a MaxRetryError on redirects,
+            the redirect response will be returned.
+
+        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
+
+        :param redirect:
+            If True, automatically handle redirects (status codes 301, 302,
+            303, 307, 308). Each redirect counts as a retry. Disabling retries
+            will disable redirect, too.
+
+        :param assert_same_host:
+            If ``True``, will make sure that the host of the pool requests is
+            consistent else will raise HostChangedError. When ``False``, you can
+            use the pool on an HTTP proxy and request foreign hosts.
+
+        :param timeout:
+            If specified, overrides the default timeout for this one
+            request. It may be a float (in seconds) or an instance of
+            :class:`urllib3.util.Timeout`.
+
+        :param pool_timeout:
+            If set and the pool is set to block=True, then this method will
+            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
+            connection is available within the time period.
+
+        :param release_conn:
+            If False, then the urlopen call will not release the connection
+            back into the pool once a response is received (but will release if
+            you read the entire contents of the response such as when
+            `preload_content=True`). This is useful if you're not preloading
+            the response's content immediately. You will need to call
+            ``r.release_conn()`` on the response ``r`` to return the connection
+            back into the pool. If None, it takes the value of
+            ``response_kw.get('preload_content', True)``.
+
+        :param chunked:
+            If True, urllib3 will send the body using chunked transfer
+            encoding. Otherwise, urllib3 will send the body using the standard
+            content-length form. Defaults to False.
+
+        :param int body_pos:
+            Position to seek to in file-like body in the event of a retry or
+            redirect. Typically this won't need to be set because urllib3 will
+            auto-populate the value when needed.
+
+        :param \\**response_kw:
+            Additional parameters are passed to
+            :meth:`urllib3.response.HTTPResponse.from_httplib`
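+
+        A minimal usage sketch (host, path, and retry counts are illustrative
+        placeholders)::
+
+            >>> pool = HTTPConnectionPool("httpbin.org")
+            >>> r = pool.urlopen("GET", "/get", retries=Retry(3, redirect=2))
+            >>> r.status
+            200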
+        """
+
+        parsed_url = parse_url(url)
+        destination_scheme = parsed_url.scheme
+
+        if headers is None:
+            headers = self.headers
+
+        if not isinstance(retries, Retry):
+            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+
+        if release_conn is None:
+            release_conn = response_kw.get("preload_content", True)
+
+        # Check host
+        if assert_same_host and not self.is_same_host(url):
+            raise HostChangedError(self, url, retries)
+
+        # Ensure that the URL we're connecting to is properly encoded
+        if url.startswith("/"):
+            url = six.ensure_str(_encode_target(url))
+        else:
+            url = six.ensure_str(parsed_url.url)
+
+        conn = None
+
+        # Track whether `conn` needs to be released before
+        # returning/raising/recursing. Update this variable if necessary, and
+        # leave `release_conn` constant throughout the function. That way, if
+        # the function recurses, the original value of `release_conn` will be
+        # passed down into the recursive call, and its value will be respected.
+        #
+        # See issue #651 [1] for details.
+        #
+        # [1] <https://github.com/urllib3/urllib3/issues/651>
+        release_this_conn = release_conn
+
+        http_tunnel_required = connection_requires_http_tunnel(
+            self.proxy, self.proxy_config, destination_scheme
+        )
+
+        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
+        # have to copy the headers dict so we can safely change it without those
+        # changes being reflected in anyone else's copy.
+        if not http_tunnel_required:
+            headers = headers.copy()
+            headers.update(self.proxy_headers)
+
+        # Must keep the exception bound to a separate variable or else Python 3
+        # complains about UnboundLocalError.
+        err = None
+
+        # Keep track of whether we cleanly exited the except block. This
+        # ensures we do proper cleanup in finally.
+        clean_exit = False
+
+        # Rewind body position, if needed. Record current position
+        # for future rewinds in the event of a redirect/retry.
+        body_pos = set_file_position(body, body_pos)
+
+        try:
+            # Request a connection from the queue.
+            timeout_obj = self._get_timeout(timeout)
+            conn = self._get_conn(timeout=pool_timeout)
+
+            conn.timeout = timeout_obj.connect_timeout
+
+            is_new_proxy_conn = self.proxy is not None and not getattr(
+                conn, "sock", None
+            )
+            if is_new_proxy_conn and http_tunnel_required:
+                self._prepare_proxy(conn)
+
+            # Make the request on the httplib connection object.
+            httplib_response = self._make_request(
+                conn,
+                method,
+                url,
+                timeout=timeout_obj,
+                body=body,
+                headers=headers,
+                chunked=chunked,
+            )
+
+            # If we're going to release the connection in ``finally:``, then
+            # the response doesn't need to know about the connection. Otherwise
+            # it will also try to release it and we'll have a double-release
+            # mess.
+            response_conn = conn if not release_conn else None
+
+            # Pass method to Response for length checking
+            response_kw["request_method"] = method
+
+            # Import httplib's response into our own wrapper object
+            response = self.ResponseCls.from_httplib(
+                httplib_response,
+                pool=self,
+                connection=response_conn,
+                retries=retries,
+                **response_kw
+            )
+
+            # Everything went great!
+            clean_exit = True
+
+        except EmptyPoolError:
+            # Didn't get a connection from the pool, no need to clean up
+            clean_exit = True
+            release_this_conn = False
+            raise
+
+        except (
+            TimeoutError,
+            HTTPException,
+            SocketError,
+            ProtocolError,
+            BaseSSLError,
+            SSLError,
+            CertificateError,
+        ) as e:
+            # Discard the connection for these exceptions. It will be
+            # replaced during the next _get_conn() call.
+            clean_exit = False
+            if isinstance(e, (BaseSSLError, CertificateError)):
+                e = SSLError(e)
+            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
+                e = ProxyError("Cannot connect to proxy.", e)
+            elif isinstance(e, (SocketError, HTTPException)):
+                e = ProtocolError("Connection aborted.", e)
+
+            retries = retries.increment(
+                method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
+            )
+            retries.sleep()
+
+            # Keep track of the error for the retry warning.
+            err = e
+
+        finally:
+            if not clean_exit:
+                # We hit some kind of exception, handled or otherwise. We need
+                # to throw the connection away unless explicitly told not to.
+                # Close the connection, set the variable to None, and make sure
+                # we put the None back in the pool to avoid leaking it.
+                conn = conn and conn.close()
+                release_this_conn = True
+
+            if release_this_conn:
+                # Put the connection back to be reused. If the connection is
+                # expired then it will be None, which will get replaced with a
+                # fresh connection during _get_conn.
+                self._put_conn(conn)
+
+        if not conn:
+            # Try again
+            log.warning(
+                "Retrying (%r) after connection broken by '%r': %s", retries, err, url
+            )
+            return self.urlopen(
+                method,
+                url,
+                body,
+                headers,
+                retries,
+                redirect,
+                assert_same_host,
+                timeout=timeout,
+                pool_timeout=pool_timeout,
+                release_conn=release_conn,
+                chunked=chunked,
+                body_pos=body_pos,
+                **response_kw
+            )
+
+        # Handle redirect?
+        redirect_location = redirect and response.get_redirect_location()
+        if redirect_location:
+            if response.status == 303:
+                method = "GET"
+
+            try:
+                retries = retries.increment(method, url, response=response, _pool=self)
+            except MaxRetryError:
+                if retries.raise_on_redirect:
+                    response.drain_conn()
+                    raise
+                return response
+
+            response.drain_conn()
+            retries.sleep_for_retry(response)
+            log.debug("Redirecting %s -> %s", url, redirect_location)
+            return self.urlopen(
+                method,
+                redirect_location,
+                body,
+                headers,
+                retries=retries,
+                redirect=redirect,
+                assert_same_host=assert_same_host,
+                timeout=timeout,
+                pool_timeout=pool_timeout,
+                release_conn=release_conn,
+                chunked=chunked,
+                body_pos=body_pos,
+                **response_kw
+            )
+
+        # Check if we should retry the HTTP response.
+        has_retry_after = bool(response.getheader("Retry-After"))
+        if retries.is_retry(method, response.status, has_retry_after):
+            try:
+                retries = retries.increment(method, url, response=response, _pool=self)
+            except MaxRetryError:
+                if retries.raise_on_status:
+                    response.drain_conn()
+                    raise
+                return response
+
+            response.drain_conn()
+            retries.sleep(response)
+            log.debug("Retry: %s", url)
+            return self.urlopen(
+                method,
+                url,
+                body,
+                headers,
+                retries=retries,
+                redirect=redirect,
+                assert_same_host=assert_same_host,
+                timeout=timeout,
+                pool_timeout=pool_timeout,
+                release_conn=release_conn,
+                chunked=chunked,
+                body_pos=body_pos,
+                **response_kw
+            )
+
+        return response
+
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+    """
+    Same as :class:`.HTTPConnectionPool`, but HTTPS.
+
+    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
+    ``assert_hostname`` and ``host`` in this order to verify connections.
+    If ``assert_hostname`` is False, no verification is done.
+
+    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
+    ``ca_cert_dir``, ``ssl_version``, and ``key_password`` are only used if :mod:`ssl`
+    is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
+    the connection socket into an SSL socket.
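+
+    A minimal usage sketch (the host and CA bundle path are illustrative
+    placeholders)::
+
+        >>> pool = HTTPSConnectionPool(
+        ...     "httpbin.org", cert_reqs="CERT_REQUIRED", ca_certs="/path/to/ca.pem"
+        ... )
+        >>> r = pool.request("GET", "/get")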
+    """
+
+    scheme = "https"
+    ConnectionCls = HTTPSConnection
+
+    def __init__(
+        self,
+        host,
+        port=None,
+        strict=False,
+        timeout=Timeout.DEFAULT_TIMEOUT,
+        maxsize=1,
+        block=False,
+        headers=None,
+        retries=None,
+        _proxy=None,
+        _proxy_headers=None,
+        key_file=None,
+        cert_file=None,
+        cert_reqs=None,
+        key_password=None,
+        ca_certs=None,
+        ssl_version=None,
+        assert_hostname=None,
+        assert_fingerprint=None,
+        ca_cert_dir=None,
+        **conn_kw
+    ):
+
+        HTTPConnectionPool.__init__(
+            self,
+            host,
+            port,
+            strict,
+            timeout,
+            maxsize,
+            block,
+            headers,
+            retries,
+            _proxy,
+            _proxy_headers,
+            **conn_kw
+        )
+
+        self.key_file = key_file
+        self.cert_file = cert_file
+        self.cert_reqs = cert_reqs
+        self.key_password = key_password
+        self.ca_certs = ca_certs
+        self.ca_cert_dir = ca_cert_dir
+        self.ssl_version = ssl_version
+        self.assert_hostname = assert_hostname
+        self.assert_fingerprint = assert_fingerprint
+
+    def _prepare_conn(self, conn):
+        """
+        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
+        and establish the tunnel if a proxy is used.
+        """
+
+        if isinstance(conn, VerifiedHTTPSConnection):
+            conn.set_cert(
+                key_file=self.key_file,
+                key_password=self.key_password,
+                cert_file=self.cert_file,
+                cert_reqs=self.cert_reqs,
+                ca_certs=self.ca_certs,
+                ca_cert_dir=self.ca_cert_dir,
+                assert_hostname=self.assert_hostname,
+                assert_fingerprint=self.assert_fingerprint,
+            )
+            conn.ssl_version = self.ssl_version
+        return conn
+
+    def _prepare_proxy(self, conn):
+        """
+        Establishes a tunnel connection through HTTP CONNECT.
+
+        The tunnel connection is established early because otherwise httplib
+        would improperly set the Host: header to the proxy's IP:port.
+        """
+
+        conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
+
+        if self.proxy.scheme == "https":
+            conn.tls_in_tls_required = True
+
+        conn.connect()
+
+    def _new_conn(self):
+        """
+        Return a fresh :class:`http.client.HTTPSConnection`.
+        """
+        self.num_connections += 1
+        log.debug(
+            "Starting new HTTPS connection (%d): %s:%s",
+            self.num_connections,
+            self.host,
+            self.port or "443",
+        )
+
+        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
+            raise SSLError(
+                "Can't connect to HTTPS URL because the SSL module is not available."
+            )
+
+        actual_host = self.host
+        actual_port = self.port
+        if self.proxy is not None:
+            actual_host = self.proxy.host
+            actual_port = self.proxy.port
+
+        conn = self.ConnectionCls(
+            host=actual_host,
+            port=actual_port,
+            timeout=self.timeout.connect_timeout,
+            strict=self.strict,
+            cert_file=self.cert_file,
+            key_file=self.key_file,
+            key_password=self.key_password,
+            **self.conn_kw
+        )
+
+        return self._prepare_conn(conn)
+
+    def _validate_conn(self, conn):
+        """
+        Called right before a request is made, after the socket is created.
+        """
+        super(HTTPSConnectionPool, self)._validate_conn(conn)
+
+        # Force connect early to allow us to validate the connection.
+        if not getattr(conn, "sock", None):  # AppEngine might not have  `.sock`
+            conn.connect()
+
+        if not conn.is_verified:
+            warnings.warn(
+                (
+                    "Unverified HTTPS request is being made to host '%s'. "
+                    "Adding certificate verification is strongly advised. See: "
+                    "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+                    "#ssl-warnings" % conn.host
+                ),
+                InsecureRequestWarning,
+            )
+
+
+def connection_from_url(url, **kw):
+    """
+    Given a URL, return a :class:`.ConnectionPool` instance for its host.
+
+    This is a shortcut for not having to parse out the scheme, host, and port
+    of the URL before creating a :class:`.ConnectionPool` instance.
+
+    :param url:
+        Absolute URL string that must include the scheme. Port is optional.
+
+    :param \\**kw:
+        Passes additional parameters to the constructor of the appropriate
+        :class:`.ConnectionPool`. Useful for specifying things like
+        timeout, maxsize, headers, etc.
+
+    Example::
+
+        >>> conn = connection_from_url('http://google.com/')
+        >>> r = conn.request('GET', '/')
+    """
+    scheme, host, port = get_host(url)
+    port = port or port_by_scheme.get(scheme, 80)
+    if scheme == "https":
+        return HTTPSConnectionPool(host, port=port, **kw)
+    else:
+        return HTTPConnectionPool(host, port=port, **kw)
+
+
+def _normalize_host(host, scheme):
+    """
+    Normalize hosts for comparisons and use with sockets.
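+
+    For example (illustrative doctest), IPv6 brackets are stripped so httplib
+    doesn't double them up in the Host header::
+
+        >>> _normalize_host("[::1]", scheme="http")
+        '::1'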
+    """
+
+    host = normalize_host(host, scheme)
+
+    # httplib doesn't like it when we include brackets in IPv6 addresses
+    # Specifically, if we include brackets but also pass the port then
+    # httplib crazily doubles up the square brackets on the Host header.
+    # Instead, we need to make sure we never pass ``None`` as the port.
+    # However, for backward compatibility reasons we can't actually
+    # *assert* that.  See http://bugs.python.org/issue28539
+    if host.startswith("[") and host.endswith("]"):
+        host = host[1:-1]
+    return host
diff --git a/venv/lib/python3.7/site-packages/urllib3/contrib/__init__.py b/venv/lib/python3.7/site-packages/urllib3/contrib/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/urllib3/contrib/_appengine_environ.py b/venv/lib/python3.7/site-packages/urllib3/contrib/_appengine_environ.py
new file mode 100644
index 00000000..8765b907
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/contrib/_appengine_environ.py
@@ -0,0 +1,36 @@
+"""
+This module provides the means to detect the App Engine environment.
+"""
+
+import os
+
+
+def is_appengine():
+    return is_local_appengine() or is_prod_appengine()
+
+
+def is_appengine_sandbox():
+    """Reports if the app is running in the first generation sandbox.
+
+    The second-generation runtimes are technically still in a sandbox, but it
+    is much less restrictive, so generally you shouldn't need to check for it.
+    See https://cloud.google.com/appengine/docs/standard/runtimes
+    """
+    return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"
+
+
+def is_local_appengine():
+    return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
+        "SERVER_SOFTWARE", ""
+    ).startswith("Development/")
+
+
+def is_prod_appengine():
+    return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
+        "SERVER_SOFTWARE", ""
+    ).startswith("Google App Engine/")
+
+
+def is_prod_appengine_mvms():
+    """Deprecated."""
+    return False
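+
+
+# Illustrative usage sketch (the environment values below are placeholders):
+#
+#     os.environ["APPENGINE_RUNTIME"] = "python27"
+#     os.environ["SERVER_SOFTWARE"] = "Google App Engine/1.9.0"
+#     is_prod_appengine()  # -> True, so is_appengine() is True as well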
diff --git a/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/__init__.py b/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/bindings.py b/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/bindings.py
new file mode 100644
index 00000000..11524d40
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/bindings.py
@@ -0,0 +1,519 @@
+"""
+This module uses ctypes to bind a whole bunch of functions and constants from
+SecureTransport. The goal here is to provide the low-level API to
+SecureTransport. These are essentially the C-level functions and constants, and
+they're pretty gross to work with.
+
+This code is a bastardised version of the code found in Will Bond's oscrypto
+library. An enormous debt is owed to him for blazing this trail for us. For
+that reason, this code should be considered to be covered both by urllib3's
+license and by oscrypto's:
+
+    Copyright (c) 2015-2016 Will Bond <will@wbond.net>
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the "Software"),
+    to deal in the Software without restriction, including without limitation
+    the rights to use, copy, modify, merge, publish, distribute, sublicense,
+    and/or sell copies of the Software, and to permit persons to whom the
+    Software is furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+    DEALINGS IN THE SOFTWARE.
+"""
+from __future__ import absolute_import
+
+import platform
+from ctypes import (
+    CDLL,
+    CFUNCTYPE,
+    POINTER,
+    c_bool,
+    c_byte,
+    c_char_p,
+    c_int32,
+    c_long,
+    c_size_t,
+    c_uint32,
+    c_ulong,
+    c_void_p,
+)
+from ctypes.util import find_library
+
+from urllib3.packages.six import raise_from
+
+if platform.system() != "Darwin":
+    raise ImportError("Only macOS is supported")
+
+version = platform.mac_ver()[0]
+version_info = tuple(map(int, version.split(".")))
+if version_info < (10, 8):
+    raise OSError(
+        "Only OS X 10.8 and newer are supported, not %s.%s"
+        % (version_info[0], version_info[1])
+    )
+
+
+def load_cdll(name, macos10_16_path):
+    """Loads a CDLL by name, falling back to known path on 10.16+"""
+    try:
+        # Big Sur is technically 11 but we use 10.16 due to the Big Sur
+        # beta being labeled as 10.16.
+        if version_info >= (10, 16):
+            path = macos10_16_path
+        else:
+            path = find_library(name)
+        if not path:
+            raise OSError  # Caught and reraised as 'ImportError'
+        return CDLL(path, use_errno=True)
+    except OSError:
+        raise_from(ImportError("The library %s failed to load" % name), None)
+
+
+Security = load_cdll(
+    "Security", "/System/Library/Frameworks/Security.framework/Security"
+)
+CoreFoundation = load_cdll(
+    "CoreFoundation",
+    "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
+)
+
+
+Boolean = c_bool
+CFIndex = c_long
+CFStringEncoding = c_uint32
+CFData = c_void_p
+CFString = c_void_p
+CFArray = c_void_p
+CFMutableArray = c_void_p
+CFDictionary = c_void_p
+CFError = c_void_p
+CFType = c_void_p
+CFTypeID = c_ulong
+
+CFTypeRef = POINTER(CFType)
+CFAllocatorRef = c_void_p
+
+OSStatus = c_int32
+
+CFDataRef = POINTER(CFData)
+CFStringRef = POINTER(CFString)
+CFArrayRef = POINTER(CFArray)
+CFMutableArrayRef = POINTER(CFMutableArray)
+CFDictionaryRef = POINTER(CFDictionary)
+CFArrayCallBacks = c_void_p
+CFDictionaryKeyCallBacks = c_void_p
+CFDictionaryValueCallBacks = c_void_p
+
+SecCertificateRef = POINTER(c_void_p)
+SecExternalFormat = c_uint32
+SecExternalItemType = c_uint32
+SecIdentityRef = POINTER(c_void_p)
+SecItemImportExportFlags = c_uint32
+SecItemImportExportKeyParameters = c_void_p
+SecKeychainRef = POINTER(c_void_p)
+SSLProtocol = c_uint32
+SSLCipherSuite = c_uint32
+SSLContextRef = POINTER(c_void_p)
+SecTrustRef = POINTER(c_void_p)
+SSLConnectionRef = c_uint32
+SecTrustResultType = c_uint32
+SecTrustOptionFlags = c_uint32
+SSLProtocolSide = c_uint32
+SSLConnectionType = c_uint32
+SSLSessionOption = c_uint32
+
+
+try:
+    Security.SecItemImport.argtypes = [
+        CFDataRef,
+        CFStringRef,
+        POINTER(SecExternalFormat),
+        POINTER(SecExternalItemType),
+        SecItemImportExportFlags,
+        POINTER(SecItemImportExportKeyParameters),
+        SecKeychainRef,
+        POINTER(CFArrayRef),
+    ]
+    Security.SecItemImport.restype = OSStatus
+
+    Security.SecCertificateGetTypeID.argtypes = []
+    Security.SecCertificateGetTypeID.restype = CFTypeID
+
+    Security.SecIdentityGetTypeID.argtypes = []
+    Security.SecIdentityGetTypeID.restype = CFTypeID
+
+    Security.SecKeyGetTypeID.argtypes = []
+    Security.SecKeyGetTypeID.restype = CFTypeID
+
+    Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
+    Security.SecCertificateCreateWithData.restype = SecCertificateRef
+
+    Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
+    Security.SecCertificateCopyData.restype = CFDataRef
+
+    Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
+    Security.SecCopyErrorMessageString.restype = CFStringRef
+
+    Security.SecIdentityCreateWithCertificate.argtypes = [
+        CFTypeRef,
+        SecCertificateRef,
+        POINTER(SecIdentityRef),
+    ]
+    Security.SecIdentityCreateWithCertificate.restype = OSStatus
+
+    Security.SecKeychainCreate.argtypes = [
+        c_char_p,
+        c_uint32,
+        c_void_p,
+        Boolean,
+        c_void_p,
+        POINTER(SecKeychainRef),
+    ]
+    Security.SecKeychainCreate.restype = OSStatus
+
+    Security.SecKeychainDelete.argtypes = [SecKeychainRef]
+    Security.SecKeychainDelete.restype = OSStatus
+
+    Security.SecPKCS12Import.argtypes = [
+        CFDataRef,
+        CFDictionaryRef,
+        POINTER(CFArrayRef),
+    ]
+    Security.SecPKCS12Import.restype = OSStatus
+
+    SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
+    SSLWriteFunc = CFUNCTYPE(
+        OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
+    )
+
+    Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
+    Security.SSLSetIOFuncs.restype = OSStatus
+
+    Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
+    Security.SSLSetPeerID.restype = OSStatus
+
+    Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
+    Security.SSLSetCertificate.restype = OSStatus
+
+    Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
+    Security.SSLSetCertificateAuthorities.restype = OSStatus
+
+    Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
+    Security.SSLSetConnection.restype = OSStatus
+
+    Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
+    Security.SSLSetPeerDomainName.restype = OSStatus
+
+    Security.SSLHandshake.argtypes = [SSLContextRef]
+    Security.SSLHandshake.restype = OSStatus
+
+    Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
+    Security.SSLRead.restype = OSStatus
+
+    Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
+    Security.SSLWrite.restype = OSStatus
+
+    Security.SSLClose.argtypes = [SSLContextRef]
+    Security.SSLClose.restype = OSStatus
+
+    Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
+    Security.SSLGetNumberSupportedCiphers.restype = OSStatus
+
+    Security.SSLGetSupportedCiphers.argtypes = [
+        SSLContextRef,
+        POINTER(SSLCipherSuite),
+        POINTER(c_size_t),
+    ]
+    Security.SSLGetSupportedCiphers.restype = OSStatus
+
+    Security.SSLSetEnabledCiphers.argtypes = [
+        SSLContextRef,
+        POINTER(SSLCipherSuite),
+        c_size_t,
+    ]
+    Security.SSLSetEnabledCiphers.restype = OSStatus
+
+    Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
+    Security.SSLGetNumberEnabledCiphers.restype = OSStatus
+
+    Security.SSLGetEnabledCiphers.argtypes = [
+        SSLContextRef,
+        POINTER(SSLCipherSuite),
+        POINTER(c_size_t),
+    ]
+    Security.SSLGetEnabledCiphers.restype = OSStatus
+
+    Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
+    Security.SSLGetNegotiatedCipher.restype = OSStatus
+
+    Security.SSLGetNegotiatedProtocolVersion.argtypes = [
+        SSLContextRef,
+        POINTER(SSLProtocol),
+    ]
+    Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
+
+    Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
+    Security.SSLCopyPeerTrust.restype = OSStatus
+
+    Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
+    Security.SecTrustSetAnchorCertificates.restype = OSStatus
+
+    Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
+    Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
+
+    Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
+    Security.SecTrustEvaluate.restype = OSStatus
+
+    Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
+    Security.SecTrustGetCertificateCount.restype = CFIndex
+
+    Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
+    Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
+
+    Security.SSLCreateContext.argtypes = [
+        CFAllocatorRef,
+        SSLProtocolSide,
+        SSLConnectionType,
+    ]
+    Security.SSLCreateContext.restype = SSLContextRef
+
+    Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
+    Security.SSLSetSessionOption.restype = OSStatus
+
+    Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
+    Security.SSLSetProtocolVersionMin.restype = OSStatus
+
+    Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
+    Security.SSLSetProtocolVersionMax.restype = OSStatus
+
+    try:
+        Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
+        Security.SSLSetALPNProtocols.restype = OSStatus
+    except AttributeError:
+        # Supported only in 10.12+
+        pass
+
+    Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
+    Security.SecCopyErrorMessageString.restype = CFStringRef
+
+    Security.SSLReadFunc = SSLReadFunc
+    Security.SSLWriteFunc = SSLWriteFunc
+    Security.SSLContextRef = SSLContextRef
+    Security.SSLProtocol = SSLProtocol
+    Security.SSLCipherSuite = SSLCipherSuite
+    Security.SecIdentityRef = SecIdentityRef
+    Security.SecKeychainRef = SecKeychainRef
+    Security.SecTrustRef = SecTrustRef
+    Security.SecTrustResultType = SecTrustResultType
+    Security.SecExternalFormat = SecExternalFormat
+    Security.OSStatus = OSStatus
+
+    Security.kSecImportExportPassphrase = CFStringRef.in_dll(
+        Security, "kSecImportExportPassphrase"
+    )
+    Security.kSecImportItemIdentity = CFStringRef.in_dll(
+        Security, "kSecImportItemIdentity"
+    )
+
+    # CoreFoundation time!
+    CoreFoundation.CFRetain.argtypes = [CFTypeRef]
+    CoreFoundation.CFRetain.restype = CFTypeRef
+
+    CoreFoundation.CFRelease.argtypes = [CFTypeRef]
+    CoreFoundation.CFRelease.restype = None
+
+    CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
+    CoreFoundation.CFGetTypeID.restype = CFTypeID
+
+    CoreFoundation.CFStringCreateWithCString.argtypes = [
+        CFAllocatorRef,
+        c_char_p,
+        CFStringEncoding,
+    ]
+    CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
+
+    CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
+    CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
+
+    CoreFoundation.CFStringGetCString.argtypes = [
+        CFStringRef,
+        c_char_p,
+        CFIndex,
+        CFStringEncoding,
+    ]
+    CoreFoundation.CFStringGetCString.restype = c_bool
+
+    CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
+    CoreFoundation.CFDataCreate.restype = CFDataRef
+
+    CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
+    CoreFoundation.CFDataGetLength.restype = CFIndex
+
+    CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
+    CoreFoundation.CFDataGetBytePtr.restype = c_void_p
+
+    CoreFoundation.CFDictionaryCreate.argtypes = [
+        CFAllocatorRef,
+        POINTER(CFTypeRef),
+        POINTER(CFTypeRef),
+        CFIndex,
+        CFDictionaryKeyCallBacks,
+        CFDictionaryValueCallBacks,
+    ]
+    CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
+
+    CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
+    CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
+
+    CoreFoundation.CFArrayCreate.argtypes = [
+        CFAllocatorRef,
+        POINTER(CFTypeRef),
+        CFIndex,
+        CFArrayCallBacks,
+    ]
+    CoreFoundation.CFArrayCreate.restype = CFArrayRef
+
+    CoreFoundation.CFArrayCreateMutable.argtypes = [
+        CFAllocatorRef,
+        CFIndex,
+        CFArrayCallBacks,
+    ]
+    CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
+
+    CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
+    CoreFoundation.CFArrayAppendValue.restype = None
+
+    CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
+    CoreFoundation.CFArrayGetCount.restype = CFIndex
+
+    CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
+    CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
+
+    CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
+        CoreFoundation, "kCFAllocatorDefault"
+    )
+    CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
+        CoreFoundation, "kCFTypeArrayCallBacks"
+    )
+    CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
+        CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
+    )
+    CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
+        CoreFoundation, "kCFTypeDictionaryValueCallBacks"
+    )
+
+    CoreFoundation.CFTypeRef = CFTypeRef
+    CoreFoundation.CFArrayRef = CFArrayRef
+    CoreFoundation.CFStringRef = CFStringRef
+    CoreFoundation.CFDictionaryRef = CFDictionaryRef
+
+except AttributeError:
+    raise ImportError("Error initializing ctypes")
+
+
+class CFConst(object):
+    """
+    A class object that acts as essentially a namespace for CoreFoundation
+    constants.
+    """
+
+    kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
+
+
+class SecurityConst(object):
+    """
+    A class object that acts as essentially a namespace for Security constants.
+    """
+
+    kSSLSessionOptionBreakOnServerAuth = 0
+
+    kSSLProtocol2 = 1
+    kSSLProtocol3 = 2
+    kTLSProtocol1 = 4
+    kTLSProtocol11 = 7
+    kTLSProtocol12 = 8
+    # SecureTransport does not support TLS 1.3 even if there's a constant for it
+    kTLSProtocol13 = 10
+    kTLSProtocolMaxSupported = 999
+
+    kSSLClientSide = 1
+    kSSLStreamType = 0
+
+    kSecFormatPEMSequence = 10
+
+    kSecTrustResultInvalid = 0
+    kSecTrustResultProceed = 1
+    # This gap is present on purpose: this was kSecTrustResultConfirm, which
+    # is deprecated.
+    kSecTrustResultDeny = 3
+    kSecTrustResultUnspecified = 4
+    kSecTrustResultRecoverableTrustFailure = 5
+    kSecTrustResultFatalTrustFailure = 6
+    kSecTrustResultOtherError = 7
+
+    errSSLProtocol = -9800
+    errSSLWouldBlock = -9803
+    errSSLClosedGraceful = -9805
+    errSSLClosedNoNotify = -9816
+    errSSLClosedAbort = -9806
+
+    errSSLXCertChainInvalid = -9807
+    errSSLCrypto = -9809
+    errSSLInternal = -9810
+    errSSLCertExpired = -9814
+    errSSLCertNotYetValid = -9815
+    errSSLUnknownRootCert = -9812
+    errSSLNoRootCert = -9813
+    errSSLHostNameMismatch = -9843
+    errSSLPeerHandshakeFail = -9824
+    errSSLPeerUserCancelled = -9839
+    errSSLWeakPeerEphemeralDHKey = -9850
+    errSSLServerAuthCompleted = -9841
+    errSSLRecordOverflow = -9847
+
+    errSecVerifyFailed = -67808
+    errSecNoTrustSettings = -25263
+    errSecItemNotFound = -25300
+    errSecInvalidTrustSettings = -25262
+
+    # Cipher suites. We only pick the ones our default cipher string allows.
+    # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
+    TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
+    TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
+    TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
+    TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
+    TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
+    TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
+    TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
+    TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
+    TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
+    TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
+    TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
+    TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
+    TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
+    TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
+    TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
+    TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
+    TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
+    TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
+    TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
+    TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
+    TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
+    TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
+    TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
+    TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
+    TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
+    TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
+    TLS_AES_128_GCM_SHA256 = 0x1301
+    TLS_AES_256_GCM_SHA384 = 0x1302
+    TLS_AES_128_CCM_8_SHA256 = 0x1305
+    TLS_AES_128_CCM_SHA256 = 0x1304
diff --git a/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/low_level.py b/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/low_level.py
new file mode 100644
index 00000000..ed812019
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/low_level.py
@@ -0,0 +1,396 @@
+"""
+Low-level helpers for the SecureTransport bindings.
+
+These are Python functions that are not directly related to the high-level APIs
+but are necessary to get them to work. They include a whole bunch of low-level
+CoreFoundation messing about and memory management. The concerns in this module
+are almost entirely about trying to avoid memory leaks and providing
+appropriate and useful assistance to the higher-level code.
+"""
+import base64
+import ctypes
+import itertools
+import os
+import re
+import ssl
+import struct
+import tempfile
+
+from .bindings import CFConst, CoreFoundation, Security
+
+# This regular expression is used to grab PEM data out of a PEM bundle.
+_PEM_CERTS_RE = re.compile(
+    b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
+)
+
+
+def _cf_data_from_bytes(bytestring):
+    """
+    Given a bytestring, create a CFData object from it. This CFData object must
+    be CFReleased by the caller.
+    """
+    return CoreFoundation.CFDataCreate(
+        CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring)
+    )
+
+
+def _cf_dictionary_from_tuples(tuples):
+    """
+    Given a list of Python tuples, create an associated CFDictionary.
+    """
+    dictionary_size = len(tuples)
+
+    # We need to get the dictionary keys and values out in the same order.
+    keys = (t[0] for t in tuples)
+    values = (t[1] for t in tuples)
+    cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys)
+    cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values)
+
+    return CoreFoundation.CFDictionaryCreate(
+        CoreFoundation.kCFAllocatorDefault,
+        cf_keys,
+        cf_values,
+        dictionary_size,
+        CoreFoundation.kCFTypeDictionaryKeyCallBacks,
+        CoreFoundation.kCFTypeDictionaryValueCallBacks,
+    )
+
+
+def _cfstr(py_bstr):
+    """
+    Given Python binary data, create a CFString.
+    The string must be CFReleased by the caller.
+    """
+    c_str = ctypes.c_char_p(py_bstr)
+    cf_str = CoreFoundation.CFStringCreateWithCString(
+        CoreFoundation.kCFAllocatorDefault,
+        c_str,
+        CFConst.kCFStringEncodingUTF8,
+    )
+    return cf_str
+
+
+def _create_cfstring_array(lst):
+    """
+    Given a list of Python binary data, create an associated CFMutableArray.
+    The array must be CFReleased by the caller.
+
+    Raises an ssl.SSLError on failure.
+    """
+    cf_arr = None
+    try:
+        cf_arr = CoreFoundation.CFArrayCreateMutable(
+            CoreFoundation.kCFAllocatorDefault,
+            0,
+            ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+        )
+        if not cf_arr:
+            raise MemoryError("Unable to allocate memory!")
+        for item in lst:
+            cf_str = _cfstr(item)
+            if not cf_str:
+                raise MemoryError("Unable to allocate memory!")
+            try:
+                CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
+            finally:
+                CoreFoundation.CFRelease(cf_str)
+    except BaseException as e:
+        if cf_arr:
+            CoreFoundation.CFRelease(cf_arr)
+        raise ssl.SSLError("Unable to allocate array: %s" % (e,))
+    return cf_arr
+
+
+def _cf_string_to_unicode(value):
+    """
+    Creates a Unicode string from a CFString object. Used entirely for error
+    reporting.
+
+    Yes, it annoys me quite a lot that this function is this complex.
+    """
+    value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
+
+    string = CoreFoundation.CFStringGetCStringPtr(
+        value_as_void_p, CFConst.kCFStringEncodingUTF8
+    )
+    if string is None:
+        buffer = ctypes.create_string_buffer(1024)
+        result = CoreFoundation.CFStringGetCString(
+            value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
+        )
+        if not result:
+            raise OSError("Error copying C string from CFStringRef")
+        string = buffer.value
+    if string is not None:
+        string = string.decode("utf-8")
+    return string
+
+
+def _assert_no_error(error, exception_class=None):
+    """
+    Checks the return code and throws an exception if there is an error to
+    report.
+    """
+    if error == 0:
+        return
+
+    cf_error_string = Security.SecCopyErrorMessageString(error, None)
+    output = _cf_string_to_unicode(cf_error_string)
+    CoreFoundation.CFRelease(cf_error_string)
+
+    if output is None or output == u"":
+        output = u"OSStatus %s" % error
+
+    if exception_class is None:
+        exception_class = ssl.SSLError
+
+    raise exception_class(output)
+
+
+def _cert_array_from_pem(pem_bundle):
+    """
+    Given a bundle of certs in PEM format, turns them into a CFArray of certs
+    that can be used to validate a cert chain.
+    """
+    # Normalize the PEM bundle's line endings.
+    pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
+
+    der_certs = [
+        base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
+    ]
+    if not der_certs:
+        raise ssl.SSLError("No root certificates specified")
+
+    cert_array = CoreFoundation.CFArrayCreateMutable(
+        CoreFoundation.kCFAllocatorDefault,
+        0,
+        ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+    )
+    if not cert_array:
+        raise ssl.SSLError("Unable to allocate memory!")
+
+    try:
+        for der_bytes in der_certs:
+            certdata = _cf_data_from_bytes(der_bytes)
+            if not certdata:
+                raise ssl.SSLError("Unable to allocate memory!")
+            cert = Security.SecCertificateCreateWithData(
+                CoreFoundation.kCFAllocatorDefault, certdata
+            )
+            CoreFoundation.CFRelease(certdata)
+            if not cert:
+                raise ssl.SSLError("Unable to build cert object!")
+
+            CoreFoundation.CFArrayAppendValue(cert_array, cert)
+            CoreFoundation.CFRelease(cert)
+    except Exception:
+        # We need to free the array before the exception bubbles further.
+        # We only want to do that if an error occurs: otherwise, the caller
+        # should free.
+        CoreFoundation.CFRelease(cert_array)
+        raise
+
+    return cert_array
+
+
+def _is_cert(item):
+    """
+    Returns True if a given CFTypeRef is a certificate.
+    """
+    expected = Security.SecCertificateGetTypeID()
+    return CoreFoundation.CFGetTypeID(item) == expected
+
+
+def _is_identity(item):
+    """
+    Returns True if a given CFTypeRef is an identity.
+    """
+    expected = Security.SecIdentityGetTypeID()
+    return CoreFoundation.CFGetTypeID(item) == expected
+
+
+def _temporary_keychain():
+    """
+    This function creates a temporary Mac keychain that we can use to work with
+    credentials. This keychain uses a one-time password and a temporary file to
+    store the data. We expect to have one keychain per socket. The returned
+    SecKeychainRef must be freed by the caller, including calling
+    SecKeychainDelete.
+
+    Returns a tuple of the SecKeychainRef and the path to the temporary
+    directory that contains it.
+    """
+    # Unfortunately, SecKeychainCreate requires a path to a keychain. This
+    # means we cannot use mkstemp to use a generic temporary file. Instead,
+    # we're going to create a temporary directory and a filename to use there.
+    # This filename will be 8 random bytes expanded into hexadecimal (base16,
+    # matching the b16encode calls below). We also need some random bytes to
+    # password-protect the keychain we're creating, so we ask for 40 random
+    # bytes in total.
+    random_bytes = os.urandom(40)
+    filename = base64.b16encode(random_bytes[:8]).decode("utf-8")
+    password = base64.b16encode(random_bytes[8:])  # Must be valid UTF-8
+    tempdirectory = tempfile.mkdtemp()
+
+    keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")
+
+    # We now want to create the keychain itself.
+    keychain = Security.SecKeychainRef()
+    status = Security.SecKeychainCreate(
+        keychain_path, len(password), password, False, None, ctypes.byref(keychain)
+    )
+    _assert_no_error(status)
+
+    # Having created the keychain, we want to pass it off to the caller.
+    return keychain, tempdirectory
+
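+# Hedged usage sketch (not upstream code): a caller that creates a temporary
+# keychain is responsible for tearing it down again, mirroring what the
+# SecureTransport socket wrapper does when it closes:
+#
+#     import shutil  # assumed available; not imported by this module
+#
+#     keychain, keychain_dir = _temporary_keychain()
+#     try:
+#         pass  # import identities, perform the handshake, ...
+#     finally:
+#         Security.SecKeychainDelete(keychain)
+#         CoreFoundation.CFRelease(keychain)
+#         shutil.rmtree(keychain_dir, ignore_errors=True)
+#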
+
+def _load_items_from_file(keychain, path):
+    """
+    Given a single file, loads all the trust objects from it into arrays and
+    the keychain.
+    Returns a tuple of lists: the first list is a list of identities, the
+    second a list of certs.
+    """
+    certificates = []
+    identities = []
+    result_array = None
+
+    with open(path, "rb") as f:
+        raw_filedata = f.read()
+
+    try:
+        filedata = CoreFoundation.CFDataCreate(
+            CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
+        )
+        result_array = CoreFoundation.CFArrayRef()
+        result = Security.SecItemImport(
+            filedata,  # cert data
+            None,  # Filename, leaving it out for now
+            None,  # What the type of the file is, we don't care
+            None,  # what's in the file, we don't care
+            0,  # import flags
+            None,  # key params, can include passphrase in the future
+            keychain,  # The keychain to insert into
+            ctypes.byref(result_array),  # Results
+        )
+        _assert_no_error(result)
+
+        # A CFArray is not very useful to us as an intermediary
+        # representation, so we are going to extract the objects we want
+        # and then free the array. We don't need to keep hold of keys: the
+        # keychain already has them!
+        result_count = CoreFoundation.CFArrayGetCount(result_array)
+        for index in range(result_count):
+            item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
+            item = ctypes.cast(item, CoreFoundation.CFTypeRef)
+
+            if _is_cert(item):
+                CoreFoundation.CFRetain(item)
+                certificates.append(item)
+            elif _is_identity(item):
+                CoreFoundation.CFRetain(item)
+                identities.append(item)
+    finally:
+        if result_array:
+            CoreFoundation.CFRelease(result_array)
+
+        CoreFoundation.CFRelease(filedata)
+
+    return (identities, certificates)
+
+
+def _load_client_cert_chain(keychain, *paths):
+    """
+    Load certificates and maybe keys from a number of files. Has the end goal
+    of returning a CFArray containing one SecIdentityRef, and then zero or more
+    SecCertificateRef objects, suitable for use as a client certificate trust
+    chain.
+    """
+    # Ok, the strategy.
+    #
+    # This relies on knowing that macOS will not give you a SecIdentityRef
+    # unless you have imported a key into a keychain. This is a somewhat
+    # artificial limitation of macOS (for example, it doesn't necessarily
+    # affect iOS), but there is nothing inside Security.framework that lets you
+    # get a SecIdentityRef without having a key in a keychain.
+    #
+    # So the policy here is we take all the files and iterate them in order.
+    # Each one will use SecItemImport to have one or more objects loaded from
+    # it. We will also point at a keychain that macOS can use to work with the
+    # private key.
+    #
+    # Once we have all the objects, we'll check what we actually have. If we
+    # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
+    # we'll take the first certificate (which we assume to be our leaf) and
+    # ask the keychain to give us a SecIdentityRef with that cert's associated
+    # key.
+    #
+    # We'll then return a CFArray containing the trust chain: one
+    # SecIdentityRef and then zero-or-more SecCertificateRef objects. The
+    # responsibility for freeing this CFArray will be with the caller. This
+    # CFArray must remain alive for the entire connection, so in practice it
+    # will be stored with a single SSLSocket, along with the reference to the
+    # keychain.
+    certificates = []
+    identities = []
+
+    # Filter out bad paths.
+    paths = (path for path in paths if path)
+
+    try:
+        for file_path in paths:
+            new_identities, new_certs = _load_items_from_file(keychain, file_path)
+            identities.extend(new_identities)
+            certificates.extend(new_certs)
+
+        # Ok, we have everything. The question is: do we have an identity? If
+        # not, we want to grab one from the first cert we have.
+        if not identities:
+            new_identity = Security.SecIdentityRef()
+            status = Security.SecIdentityCreateWithCertificate(
+                keychain, certificates[0], ctypes.byref(new_identity)
+            )
+            _assert_no_error(status)
+            identities.append(new_identity)
+
+            # We now want to release the original certificate, as we no longer
+            # need it.
+            CoreFoundation.CFRelease(certificates.pop(0))
+
+        # We now need to build a new CFArray that holds the trust chain.
+        trust_chain = CoreFoundation.CFArrayCreateMutable(
+            CoreFoundation.kCFAllocatorDefault,
+            0,
+            ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+        )
+        for item in itertools.chain(identities, certificates):
+            # ArrayAppendValue does a CFRetain on the item. That's fine,
+            # because the finally block will release our other refs to them.
+            CoreFoundation.CFArrayAppendValue(trust_chain, item)
+
+        return trust_chain
+    finally:
+        for obj in itertools.chain(identities, certificates):
+            CoreFoundation.CFRelease(obj)
+
+
+TLS_PROTOCOL_VERSIONS = {
+    "SSLv2": (0, 2),
+    "SSLv3": (3, 0),
+    "TLSv1": (3, 1),
+    "TLSv1.1": (3, 2),
+    "TLSv1.2": (3, 3),
+}
+
+
+def _build_tls_unknown_ca_alert(version):
+    """
+    Builds a TLS alert record for an unknown CA.
+    """
+    ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
+    severity_fatal = 0x02
+    description_unknown_ca = 0x30
+    msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
+    msg_len = len(msg)
+    record_type_alert = 0x15
+    record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
+    return record
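+
+
+# Worked example (illustrative): for "TLSv1.2" the function returns
+# b"\x15\x03\x03\x00\x02\x02\x30" -- alert record type 0x15, protocol
+# version 3.3, a two-byte payload length, then severity fatal (0x02) and
+# description unknown_ca (0x30).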
diff --git a/venv/lib/python3.7/site-packages/urllib3/contrib/appengine.py b/venv/lib/python3.7/site-packages/urllib3/contrib/appengine.py
new file mode 100644
index 00000000..aa64a091
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/contrib/appengine.py
@@ -0,0 +1,314 @@
+"""
+This module provides a pool manager that uses Google App Engine's
+`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
+
+Example usage::
+
+    from urllib3 import PoolManager
+    from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
+
+    if is_appengine_sandbox():
+        # AppEngineManager uses AppEngine's URLFetch API behind the scenes
+        http = AppEngineManager()
+    else:
+        # PoolManager uses a socket-level API behind the scenes
+        http = PoolManager()
+
+    r = http.request('GET', 'https://google.com/')
+
+There are `limitations <https://cloud.google.com/appengine/docs/python/\
+urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
+the best choice for your application. There are three options for using
+urllib3 on Google App Engine:
+
+1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
+   cost-effective in many circumstances as long as your usage is within the
+   limitations.
+2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
+   Sockets also have `limitations and restrictions
+   <https://cloud.google.com/appengine/docs/python/sockets/\
+   #limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
+   To use sockets, be sure to specify the following in your ``app.yaml``::
+
+        env_variables:
+            GAE_USE_SOCKETS_HTTPLIB : 'true'
+
+3. If you are using `App Engine Flexible
+<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
+:class:`PoolManager` without any configuration or special environment variables.
+"""
+
+from __future__ import absolute_import
+
+import io
+import logging
+import warnings
+
+from ..exceptions import (
+    HTTPError,
+    HTTPWarning,
+    MaxRetryError,
+    ProtocolError,
+    SSLError,
+    TimeoutError,
+)
+from ..packages.six.moves.urllib.parse import urljoin
+from ..request import RequestMethods
+from ..response import HTTPResponse
+from ..util.retry import Retry
+from ..util.timeout import Timeout
+from . import _appengine_environ
+
+try:
+    from google.appengine.api import urlfetch
+except ImportError:
+    urlfetch = None
+
+
+log = logging.getLogger(__name__)
+
+
+class AppEnginePlatformWarning(HTTPWarning):
+    pass
+
+
+class AppEnginePlatformError(HTTPError):
+    pass
+
+
+class AppEngineManager(RequestMethods):
+    """
+    Connection manager for Google App Engine sandbox applications.
+
+    This manager uses the URLFetch service directly instead of using the
+    emulated httplib, and is subject to URLFetch limitations as described in
+    the App Engine documentation `here
+    <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
+
+    Notably it will raise an :class:`AppEnginePlatformError` if:
+        * URLFetch is not available.
+        * you attempt to use this on App Engine Flexible, since full socket
+          support is available there.
+        * a request size is more than 10 megabytes.
+        * a response size is more than 32 megabytes.
+        * you use an unsupported request method such as OPTIONS.
+
+    Beyond those cases, it will raise normal urllib3 errors.
+    """
+
+    def __init__(
+        self,
+        headers=None,
+        retries=None,
+        validate_certificate=True,
+        urlfetch_retries=True,
+    ):
+        if not urlfetch:
+            raise AppEnginePlatformError(
+                "URLFetch is not available in this environment."
+            )
+
+        warnings.warn(
+            "urllib3 is using URLFetch on Google App Engine sandbox instead "
+            "of sockets. To use sockets directly instead of URLFetch see "
+            "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
+            AppEnginePlatformWarning,
+        )
+
+        RequestMethods.__init__(self, headers)
+        self.validate_certificate = validate_certificate
+        self.urlfetch_retries = urlfetch_retries
+
+        self.retries = retries or Retry.DEFAULT
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        # Return False to re-raise any potential exceptions
+        return False
+
+    def urlopen(
+        self,
+        method,
+        url,
+        body=None,
+        headers=None,
+        retries=None,
+        redirect=True,
+        timeout=Timeout.DEFAULT_TIMEOUT,
+        **response_kw
+    ):
+
+        retries = self._get_retries(retries, redirect)
+
+        try:
+            follow_redirects = redirect and retries.redirect != 0 and retries.total
+            response = urlfetch.fetch(
+                url,
+                payload=body,
+                method=method,
+                headers=headers or {},
+                allow_truncated=False,
+                follow_redirects=self.urlfetch_retries and follow_redirects,
+                deadline=self._get_absolute_timeout(timeout),
+                validate_certificate=self.validate_certificate,
+            )
+        except urlfetch.DeadlineExceededError as e:
+            raise TimeoutError(self, e)
+
+        except urlfetch.InvalidURLError as e:
+            if "too large" in str(e):
+                raise AppEnginePlatformError(
+                    "URLFetch request too large, URLFetch only "
+                    "supports requests up to 10mb in size.",
+                    e,
+                )
+            raise ProtocolError(e)
+
+        except urlfetch.DownloadError as e:
+            if "Too many redirects" in str(e):
+                raise MaxRetryError(self, url, reason=e)
+            raise ProtocolError(e)
+
+        except urlfetch.ResponseTooLargeError as e:
+            raise AppEnginePlatformError(
+                "URLFetch response too large, URLFetch only supports"
+                "responses up to 32mb in size.",
+                e,
+            )
+
+        except urlfetch.SSLCertificateError as e:
+            raise SSLError(e)
+
+        except urlfetch.InvalidMethodError as e:
+            raise AppEnginePlatformError(
+                "URLFetch does not support method: %s" % method, e
+            )
+
+        http_response = self._urlfetch_response_to_http_response(
+            response, retries=retries, **response_kw
+        )
+
+        # Handle redirect?
+        redirect_location = redirect and http_response.get_redirect_location()
+        if redirect_location:
+            # Check for redirect response
+            if self.urlfetch_retries and retries.raise_on_redirect:
+                raise MaxRetryError(self, url, "too many redirects")
+            else:
+                if http_response.status == 303:
+                    method = "GET"
+
+                try:
+                    retries = retries.increment(
+                        method, url, response=http_response, _pool=self
+                    )
+                except MaxRetryError:
+                    if retries.raise_on_redirect:
+                        raise MaxRetryError(self, url, "too many redirects")
+                    return http_response
+
+                retries.sleep_for_retry(http_response)
+                log.debug("Redirecting %s -> %s", url, redirect_location)
+                redirect_url = urljoin(url, redirect_location)
+                return self.urlopen(
+                    method,
+                    redirect_url,
+                    body,
+                    headers,
+                    retries=retries,
+                    redirect=redirect,
+                    timeout=timeout,
+                    **response_kw
+                )
+
+        # Check if we should retry the HTTP response.
+        has_retry_after = bool(http_response.getheader("Retry-After"))
+        if retries.is_retry(method, http_response.status, has_retry_after):
+            retries = retries.increment(method, url, response=http_response, _pool=self)
+            log.debug("Retry: %s", url)
+            retries.sleep(http_response)
+            return self.urlopen(
+                method,
+                url,
+                body=body,
+                headers=headers,
+                retries=retries,
+                redirect=redirect,
+                timeout=timeout,
+                **response_kw
+            )
+
+        return http_response
+
+    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
+
+        if is_prod_appengine():
+            # Production GAE handles deflate encoding automatically, but does
+            # not remove the encoding header.
+            content_encoding = urlfetch_resp.headers.get("content-encoding")
+
+            if content_encoding == "deflate":
+                del urlfetch_resp.headers["content-encoding"]
+
+        transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
+        # We have a full response's content,
+        # so let's make sure we don't report ourselves as chunked data.
+        if transfer_encoding == "chunked":
+            encodings = transfer_encoding.split(",")
+            encodings.remove("chunked")
+            urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
+
+        original_response = HTTPResponse(
+            # In order for decoding to work, we must present the content as
+            # a file-like object.
+            body=io.BytesIO(urlfetch_resp.content),
+            msg=urlfetch_resp.header_msg,
+            headers=urlfetch_resp.headers,
+            status=urlfetch_resp.status_code,
+            **response_kw
+        )
+
+        return HTTPResponse(
+            body=io.BytesIO(urlfetch_resp.content),
+            headers=urlfetch_resp.headers,
+            status=urlfetch_resp.status_code,
+            original_response=original_response,
+            **response_kw
+        )
+
+    def _get_absolute_timeout(self, timeout):
+        if timeout is Timeout.DEFAULT_TIMEOUT:
+            return None  # Defer to URLFetch's default.
+        if isinstance(timeout, Timeout):
+            if timeout._read is not None or timeout._connect is not None:
+                warnings.warn(
+                    "URLFetch does not support granular timeout settings, "
+                    "reverting to total or default URLFetch timeout.",
+                    AppEnginePlatformWarning,
+                )
+            return timeout.total
+        return timeout
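+
+    # For example (illustrative): Timeout(total=7.5) maps to a URLFetch
+    # deadline of 7.5 seconds, while Timeout(connect=2, read=5) triggers the
+    # warning above and falls back to its `total` value (None here, i.e.
+    # URLFetch's own default deadline).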
+
+    def _get_retries(self, retries, redirect):
+        if not isinstance(retries, Retry):
+            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+
+        if retries.connect or retries.read or retries.redirect:
+            warnings.warn(
+                "URLFetch only supports total retries and does not "
+                "recognize connect, read, or redirect retry parameters.",
+                AppEnginePlatformWarning,
+            )
+
+        return retries
+
+
+# Alias methods from _appengine_environ to maintain public API interface.
+
+is_appengine = _appengine_environ.is_appengine
+is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
+is_local_appengine = _appengine_environ.is_local_appengine
+is_prod_appengine = _appengine_environ.is_prod_appengine
+is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
diff --git a/venv/lib/python3.7/site-packages/urllib3/contrib/ntlmpool.py b/venv/lib/python3.7/site-packages/urllib3/contrib/ntlmpool.py
new file mode 100644
index 00000000..b2df45dc
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/contrib/ntlmpool.py
@@ -0,0 +1,121 @@
+"""
+NTLM authenticating pool, contributed by erikcederstran
+
+Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
+"""
+from __future__ import absolute_import
+
+from logging import getLogger
+
+from ntlm import ntlm
+
+from .. import HTTPSConnectionPool
+from ..packages.six.moves.http_client import HTTPSConnection
+
+log = getLogger(__name__)
+
+
+class NTLMConnectionPool(HTTPSConnectionPool):
+    """
+    Implements an NTLM authentication version of an urllib3 connection pool
+    """
+
+    scheme = "https"
+
+    def __init__(self, user, pw, authurl, *args, **kwargs):
+        """
+        authurl is a random URL on the server that is protected by NTLM.
+        user is the Windows user, probably in the DOMAIN\\username format.
+        pw is the password for the user.
+        """
+        super(NTLMConnectionPool, self).__init__(*args, **kwargs)
+        self.authurl = authurl
+        self.rawuser = user
+        user_parts = user.split("\\", 1)
+        self.domain = user_parts[0].upper()
+        self.user = user_parts[1]
+        self.pw = pw
+
+    def _new_conn(self):
+        # Performs the NTLM handshake that secures the connection. The socket
+        # must be kept open while requests are performed.
+        self.num_connections += 1
+        log.debug(
+            "Starting NTLM HTTPS connection no. %d: https://%s%s",
+            self.num_connections,
+            self.host,
+            self.authurl,
+        )
+
+        headers = {"Connection": "Keep-Alive"}
+        req_header = "Authorization"
+        resp_header = "www-authenticate"
+
+        conn = HTTPSConnection(host=self.host, port=self.port)
+
+        # Send negotiation message
+        headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
+            self.rawuser
+        )
+        log.debug("Request headers: %s", headers)
+        conn.request("GET", self.authurl, None, headers)
+        res = conn.getresponse()
+        reshdr = dict(res.getheaders())
+        log.debug("Response status: %s %s", res.status, res.reason)
+        log.debug("Response headers: %s", reshdr)
+        log.debug("Response data: %s [...]", res.read(100))
+
+        # Remove the reference to the socket, so that it can not be closed by
+        # the response object (we want to keep the socket open)
+        res.fp = None
+
+        # Server should respond with a challenge message
+        auth_header_values = reshdr[resp_header].split(", ")
+        auth_header_value = None
+        for s in auth_header_values:
+            if s[:5] == "NTLM ":
+                auth_header_value = s[5:]
+        if auth_header_value is None:
+            raise Exception(
+                "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
+            )
+
+        # Send authentication message
+        ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
+            auth_header_value
+        )
+        auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
+            ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
+        )
+        headers[req_header] = "NTLM %s" % auth_msg
+        log.debug("Request headers: %s", headers)
+        conn.request("GET", self.authurl, None, headers)
+        res = conn.getresponse()
+        log.debug("Response status: %s %s", res.status, res.reason)
+        log.debug("Response headers: %s", dict(res.getheaders()))
+        log.debug("Response data: %s [...]", res.read()[:100])
+        if res.status != 200:
+            if res.status == 401:
+                raise Exception("Server rejected request: wrong username or password")
+            raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
+
+        res.fp = None
+        log.debug("Connection established")
+        return conn
+
+    def urlopen(
+        self,
+        method,
+        url,
+        body=None,
+        headers=None,
+        retries=3,
+        redirect=True,
+        assert_same_host=True,
+    ):
+        if headers is None:
+            headers = {}
+        headers["Connection"] = "Keep-Alive"
+        return super(NTLMConnectionPool, self).urlopen(
+            method, url, body, headers, retries, redirect, assert_same_host
+        )
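+
+
+# Hedged usage sketch (hypothetical host and credentials, not upstream code):
+#
+#     pool = NTLMConnectionPool(
+#         "EXAMPLE\\alice", "secret", authurl="/",
+#         host="intranet.example.com", port=443,
+#     )
+#     response = pool.urlopen("GET", "/reports")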
diff --git a/venv/lib/python3.7/site-packages/urllib3/contrib/pyopenssl.py b/venv/lib/python3.7/site-packages/urllib3/contrib/pyopenssl.py
new file mode 100644
index 00000000..0cabab1a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/contrib/pyopenssl.py
@@ -0,0 +1,509 @@
+"""
+TLS with SNI_-support for Python 2. Follow these instructions if you would
+like to verify TLS certificates in Python 2. Note, the default libraries do
+*not* do certificate checking; you need to do additional work to validate
+certificates yourself.
+
+This needs the following packages installed:
+
+* `pyOpenSSL`_ (tested with 16.0.0)
+* `cryptography`_ (minimum 1.3.4, from pyopenssl)
+* `idna`_ (minimum 2.0, from cryptography)
+
+However, pyopenssl depends on cryptography, which depends on idna, so while we
+use all three directly here we end up having relatively few packages required.
+
+You can install them with the following command:
+
+.. code-block:: bash
+
+    $ python -m pip install pyopenssl cryptography idna
+
+To activate certificate checking, call
+:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
+before you begin making HTTP requests. This can be done in a ``sitecustomize``
+module, or at any other time before your application begins using ``urllib3``,
+like this:
+
+.. code-block:: python
+
+    try:
+        import urllib3.contrib.pyopenssl
+        urllib3.contrib.pyopenssl.inject_into_urllib3()
+    except ImportError:
+        pass
+
+Now you can use :mod:`urllib3` as you normally would, and it will support SNI
+when the required modules are installed.
+
+Activating this module also has the positive side effect of disabling SSL/TLS
+compression in Python 2 (see `CRIME attack`_).
+
+.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
+.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
+.. _pyopenssl: https://www.pyopenssl.org
+.. _cryptography: https://cryptography.io
+.. _idna: https://github.com/kjd/idna
+"""
+from __future__ import absolute_import
+
+import OpenSSL.SSL
+from cryptography import x509
+from cryptography.hazmat.backends.openssl import backend as openssl_backend
+from cryptography.hazmat.backends.openssl.x509 import _Certificate
+
+try:
+    from cryptography.x509 import UnsupportedExtension
+except ImportError:
+    # UnsupportedExtension is gone in cryptography >= 2.1.0
+    class UnsupportedExtension(Exception):
+        pass
+
+
+from io import BytesIO
+from socket import error as SocketError
+from socket import timeout
+
+try:  # Platform-specific: Python 2
+    from socket import _fileobject
+except ImportError:  # Platform-specific: Python 3
+    _fileobject = None
+    from ..packages.backports.makefile import backport_makefile
+
+import logging
+import ssl
+import sys
+
+from .. import util
+from ..packages import six
+
+__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+
+# SNI always works.
+HAS_SNI = True
+
+# Map from urllib3 to PyOpenSSL compatible parameter-values.
+_openssl_versions = {
+    util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
+    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
+}
+
+if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
+    _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
+
+if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
+    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
+
+if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
+    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
+
+
+_stdlib_to_openssl_verify = {
+    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
+    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
+    ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
+    + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
+}
+_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items())
+
+# OpenSSL will only write 16K at a time
+SSL_WRITE_BLOCKSIZE = 16384
+
+orig_util_HAS_SNI = util.HAS_SNI
+orig_util_SSLContext = util.ssl_.SSLContext
+
+
+log = logging.getLogger(__name__)
+
+
+def inject_into_urllib3():
+    "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
+
+    _validate_dependencies_met()
+
+    util.SSLContext = PyOpenSSLContext
+    util.ssl_.SSLContext = PyOpenSSLContext
+    util.HAS_SNI = HAS_SNI
+    util.ssl_.HAS_SNI = HAS_SNI
+    util.IS_PYOPENSSL = True
+    util.ssl_.IS_PYOPENSSL = True
+
+
+def extract_from_urllib3():
+    "Undo monkey-patching by :func:`inject_into_urllib3`."
+
+    util.SSLContext = orig_util_SSLContext
+    util.ssl_.SSLContext = orig_util_SSLContext
+    util.HAS_SNI = orig_util_HAS_SNI
+    util.ssl_.HAS_SNI = orig_util_HAS_SNI
+    util.IS_PYOPENSSL = False
+    util.ssl_.IS_PYOPENSSL = False
+
+
+def _validate_dependencies_met():
+    """
+    Verifies that PyOpenSSL's package-level dependencies have been met.
+    Throws `ImportError` if they are not met.
+    """
+    # Method added in `cryptography==1.1`; not available in older versions
+    from cryptography.x509.extensions import Extensions
+
+    if getattr(Extensions, "get_extension_for_class", None) is None:
+        raise ImportError(
+            "'cryptography' module missing required functionality.  "
+            "Try upgrading to v1.3.4 or newer."
+        )
+
+    # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
+    # attribute is only present on those versions.
+    from OpenSSL.crypto import X509
+
+    x509 = X509()
+    if getattr(x509, "_x509", None) is None:
+        raise ImportError(
+            "'pyOpenSSL' module missing required functionality. "
+            "Try upgrading to v0.14 or newer."
+        )
+
+
+def _dnsname_to_stdlib(name):
+    """
+    Converts a dNSName SubjectAlternativeName field to the form used by the
+    standard library on the given Python version.
+
+    Cryptography produces a dNSName as a unicode string that was idna-decoded
+    from ASCII bytes. We need to idna-encode that string to get it back, and
+    then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
+    uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
+
+    If the name cannot be idna-encoded then we return None signalling that
+    the name given should be skipped.
+    """
+
+    def idna_encode(name):
+        """
+        Borrowed wholesale from the Python Cryptography Project. It turns out
+        that we can't just safely call `idna.encode`: it can explode for
+        wildcard names. This avoids that problem.
+        """
+        import idna
+
+        try:
+            for prefix in [u"*.", u"."]:
+                if name.startswith(prefix):
+                    name = name[len(prefix) :]
+                    return prefix.encode("ascii") + idna.encode(name)
+            return idna.encode(name)
+        except idna.core.IDNAError:
+            return None
+
+    # Don't send IPv6 addresses through the IDNA encoder.
+    if ":" in name:
+        return name
+
+    name = idna_encode(name)
+    if name is None:
+        return None
+    elif sys.version_info >= (3, 0):
+        name = name.decode("utf-8")
+    return name
+
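+# Behaviour sketch (illustrative, assuming the `idna` package is installed):
+#
+#     _dnsname_to_stdlib(u"*.example.com")        # -> "*.example.com"
+#     _dnsname_to_stdlib(u"b\xfccher.example")    # -> "xn--bcher-kva.example"
+#     _dnsname_to_stdlib(u"2001:db8::1")          # IPv6 literal, unchanged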
+
+def get_subj_alt_name(peer_cert):
+    """
+    Given a PyOpenSSL certificate, provides all the subject alternative names.
+    """
+    # Pass the cert to cryptography, which has much better APIs for this.
+    if hasattr(peer_cert, "to_cryptography"):
+        cert = peer_cert.to_cryptography()
+    else:
+        # This is technically using private APIs, but should work across all
+        # relevant versions before PyOpenSSL got a proper API for this.
+        cert = _Certificate(openssl_backend, peer_cert._x509)
+
+    # We want to find the SAN extension. Ask Cryptography to locate it (it's
+    # faster than looping in Python)
+    try:
+        ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
+    except x509.ExtensionNotFound:
+        # No such extension, return the empty list.
+        return []
+    except (
+        x509.DuplicateExtension,
+        UnsupportedExtension,
+        x509.UnsupportedGeneralNameType,
+        UnicodeError,
+    ) as e:
+        # A problem has been found with the quality of the certificate. Assume
+        # no SAN field is present.
+        log.warning(
+            "A problem was encountered with the certificate that prevented "
+            "urllib3 from finding the SubjectAlternativeName field. This can "
+            "affect certificate validation. The error was %s",
+            e,
+        )
+        return []
+
+    # We want to return dNSName and iPAddress fields. We need to cast the IPs
+    # back to strings because the match_hostname function wants them as
+    # strings.
+    # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
+    # decoded. This is pretty frustrating, but that's what the standard library
+    # does with certificates, and so we need to attempt to do the same.
+    # We also want to skip over names which cannot be idna encoded.
+    names = [
+        ("DNS", name)
+        for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
+        if name is not None
+    ]
+    names.extend(
+        ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
+    )
+
+    return names
+
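+# The resulting list matches what ssl.match_hostname expects, e.g.
+# (illustrative values):
+#
+#     [("DNS", "example.com"), ("DNS", "*.example.com"),
+#      ("IP Address", "192.0.2.10")]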
+
+class WrappedSocket(object):
+    """API-compatibility wrapper for Python OpenSSL's Connection-class.
+
+    Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
+    collector of pypy.
+    """
+
+    def __init__(self, connection, socket, suppress_ragged_eofs=True):
+        self.connection = connection
+        self.socket = socket
+        self.suppress_ragged_eofs = suppress_ragged_eofs
+        self._makefile_refs = 0
+        self._closed = False
+
+    def fileno(self):
+        return self.socket.fileno()
+
+    # Copy-pasted from Python 3.5 source code
+    def _decref_socketios(self):
+        if self._makefile_refs > 0:
+            self._makefile_refs -= 1
+        if self._closed:
+            self.close()
+
+    def recv(self, *args, **kwargs):
+        try:
+            data = self.connection.recv(*args, **kwargs)
+        except OpenSSL.SSL.SysCallError as e:
+            if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
+                return b""
+            else:
+                raise SocketError(str(e))
+        except OpenSSL.SSL.ZeroReturnError:
+            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+                return b""
+            else:
+                raise
+        except OpenSSL.SSL.WantReadError:
+            if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+                raise timeout("The read operation timed out")
+            else:
+                return self.recv(*args, **kwargs)
+
+        # TLS 1.3 post-handshake authentication
+        except OpenSSL.SSL.Error as e:
+            raise ssl.SSLError("read error: %r" % e)
+        else:
+            return data
+
+    def recv_into(self, *args, **kwargs):
+        try:
+            return self.connection.recv_into(*args, **kwargs)
+        except OpenSSL.SSL.SysCallError as e:
+            if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
+                return 0
+            else:
+                raise SocketError(str(e))
+        except OpenSSL.SSL.ZeroReturnError:
+            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+                return 0
+            else:
+                raise
+        except OpenSSL.SSL.WantReadError:
+            if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+                raise timeout("The read operation timed out")
+            else:
+                return self.recv_into(*args, **kwargs)
+
+        # TLS 1.3 post-handshake authentication
+        except OpenSSL.SSL.Error as e:
+            raise ssl.SSLError("read error: %r" % e)
+
+    def settimeout(self, timeout):
+        return self.socket.settimeout(timeout)
+
+    def _send_until_done(self, data):
+        while True:
+            try:
+                return self.connection.send(data)
+            except OpenSSL.SSL.WantWriteError:
+                if not util.wait_for_write(self.socket, self.socket.gettimeout()):
+                    raise timeout()
+                continue
+            except OpenSSL.SSL.SysCallError as e:
+                raise SocketError(str(e))
+
+    def sendall(self, data):
+        total_sent = 0
+        while total_sent < len(data):
+            sent = self._send_until_done(
+                data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
+            )
+            total_sent += sent
+
+    def shutdown(self):
+        # FIXME rethrow compatible exceptions should we ever use this
+        self.connection.shutdown()
+
+    def close(self):
+        if self._makefile_refs < 1:
+            try:
+                self._closed = True
+                return self.connection.close()
+            except OpenSSL.SSL.Error:
+                return
+        else:
+            self._makefile_refs -= 1
+
+    def getpeercert(self, binary_form=False):
+        x509 = self.connection.get_peer_certificate()
+
+        if not x509:
+            return x509
+
+        if binary_form:
+            return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)
+
+        return {
+            "subject": ((("commonName", x509.get_subject().CN),),),
+            "subjectAltName": get_subj_alt_name(x509),
+        }
+
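+    # Example return shape of getpeercert() above (illustrative values):
+    #
+    #     {"subject": ((("commonName", "example.com"),),),
+    #      "subjectAltName": [("DNS", "example.com")]}
+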
+    def version(self):
+        return self.connection.get_protocol_version_name()
+
+    def _reuse(self):
+        self._makefile_refs += 1
+
+    def _drop(self):
+        if self._makefile_refs < 1:
+            self.close()
+        else:
+            self._makefile_refs -= 1
+
+
+if _fileobject:  # Platform-specific: Python 2
+
+    def makefile(self, mode, bufsize=-1):
+        self._makefile_refs += 1
+        return _fileobject(self, mode, bufsize, close=True)
+
+
+else:  # Platform-specific: Python 3
+    makefile = backport_makefile
+
+WrappedSocket.makefile = makefile
+
+
+class PyOpenSSLContext(object):
+    """
+    I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
+    for translating the interface of the standard library ``SSLContext`` object
+    to calls into PyOpenSSL.
+    """
+
+    def __init__(self, protocol):
+        self.protocol = _openssl_versions[protocol]
+        self._ctx = OpenSSL.SSL.Context(self.protocol)
+        self._options = 0
+        self.check_hostname = False
+
+    @property
+    def options(self):
+        return self._options
+
+    @options.setter
+    def options(self, value):
+        self._options = value
+        self._ctx.set_options(value)
+
+    @property
+    def verify_mode(self):
+        return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
+
+    @verify_mode.setter
+    def verify_mode(self, value):
+        self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
+
+    def set_default_verify_paths(self):
+        self._ctx.set_default_verify_paths()
+
+    def set_ciphers(self, ciphers):
+        if isinstance(ciphers, six.text_type):
+            ciphers = ciphers.encode("utf-8")
+        self._ctx.set_cipher_list(ciphers)
+
+    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+        if cafile is not None:
+            cafile = cafile.encode("utf-8")
+        if capath is not None:
+            capath = capath.encode("utf-8")
+        try:
+            self._ctx.load_verify_locations(cafile, capath)
+            if cadata is not None:
+                self._ctx.load_verify_locations(BytesIO(cadata))
+        except OpenSSL.SSL.Error as e:
+            raise ssl.SSLError("unable to load trusted certificates: %r" % e)
+
+    def load_cert_chain(self, certfile, keyfile=None, password=None):
+        self._ctx.use_certificate_chain_file(certfile)
+        if password is not None:
+            if not isinstance(password, six.binary_type):
+                password = password.encode("utf-8")
+            self._ctx.set_passwd_cb(lambda *_: password)
+        self._ctx.use_privatekey_file(keyfile or certfile)
+
+    def set_alpn_protocols(self, protocols):
+        protocols = [six.ensure_binary(p) for p in protocols]
+        return self._ctx.set_alpn_protos(protocols)
+
+    def wrap_socket(
+        self,
+        sock,
+        server_side=False,
+        do_handshake_on_connect=True,
+        suppress_ragged_eofs=True,
+        server_hostname=None,
+    ):
+        cnx = OpenSSL.SSL.Connection(self._ctx, sock)
+
+        if isinstance(server_hostname, six.text_type):  # Platform-specific: Python 3
+            server_hostname = server_hostname.encode("utf-8")
+
+        if server_hostname is not None:
+            cnx.set_tlsext_host_name(server_hostname)
+
+        cnx.set_connect_state()
+
+        while True:
+            try:
+                cnx.do_handshake()
+            except OpenSSL.SSL.WantReadError:
+                if not util.wait_for_read(sock, sock.gettimeout()):
+                    raise timeout("select timed out")
+                continue
+            except OpenSSL.SSL.Error as e:
+                raise ssl.SSLError("bad handshake: %r" % e)
+            break
+
+        return WrappedSocket(cnx, sock)
+
+
+def _verify_callback(cnx, x509, err_no, err_depth, return_code):
+    return err_no == 0
diff --git a/venv/lib/python3.7/site-packages/urllib3/contrib/securetransport.py b/venv/lib/python3.7/site-packages/urllib3/contrib/securetransport.py
new file mode 100644
index 00000000..ab092de6
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/contrib/securetransport.py
@@ -0,0 +1,920 @@
+"""
+SecureTransport support for urllib3 via ctypes.
+
+This makes platform-native TLS available to urllib3 users on macOS without the
+use of a compiler. This is an important feature because the Python Package
+Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
+that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
+this is to give macOS users an alternative solution to the problem, and that
+solution is to use SecureTransport.
+
+We use ctypes here because this solution must not require a compiler. That's
+because pip is not allowed to require a compiler either.
+
+This is not intended to be a seriously long-term solution to this problem.
+The hope is that PEP 543 will eventually solve this issue for us, at which
+point we can retire this contrib module. But in the short term, we need to
+solve the impending tire fire that is Python on Mac without this kind of
+contrib module. So...here we are.
+
+To use this module, simply import and inject it::
+
+    import urllib3.contrib.securetransport
+    urllib3.contrib.securetransport.inject_into_urllib3()
+
+Happy TLSing!
+
+This code is a bastardised version of the code found in Will Bond's oscrypto
+library. An enormous debt is owed to him for blazing this trail for us. For
+that reason, this code should be considered to be covered both by urllib3's
+license and by oscrypto's:
+
+.. code-block::
+
+    Copyright (c) 2015-2016 Will Bond <will@wbond.net>
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the "Software"),
+    to deal in the Software without restriction, including without limitation
+    the rights to use, copy, modify, merge, publish, distribute, sublicense,
+    and/or sell copies of the Software, and to permit persons to whom the
+    Software is furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+    DEALINGS IN THE SOFTWARE.
+"""
+from __future__ import absolute_import
+
+import contextlib
+import ctypes
+import errno
+import os.path
+import shutil
+import socket
+import ssl
+import struct
+import threading
+import weakref
+
+import six
+
+from .. import util
+from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
+from ._securetransport.low_level import (
+    _assert_no_error,
+    _build_tls_unknown_ca_alert,
+    _cert_array_from_pem,
+    _create_cfstring_array,
+    _load_client_cert_chain,
+    _temporary_keychain,
+)
+
+try:  # Platform-specific: Python 2
+    from socket import _fileobject
+except ImportError:  # Platform-specific: Python 3
+    _fileobject = None
+    from ..packages.backports.makefile import backport_makefile
+
+__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+
+# SNI always works
+HAS_SNI = True
+
+orig_util_HAS_SNI = util.HAS_SNI
+orig_util_SSLContext = util.ssl_.SSLContext
+
+# This dictionary is used by the read callback to obtain a handle to the
+# calling wrapped socket. This is a pretty silly approach, but for now it'll
+# do. I feel like I should be able to smuggle a handle to the wrapped socket
+# directly in the SSLConnectionRef, but for now this approach will work I
+# guess.
+#
+# We need to lock around this structure for inserts, but we don't do it for
+# reads/writes in the callbacks. The reasoning here goes as follows:
+#
+#    1. It is not possible to call into the callbacks before the dictionary is
+#       populated, so once in the callback the id must be in the dictionary.
+#    2. The callbacks don't mutate the dictionary, they only read from it, and
+#       so cannot conflict with any of the insertions.
+#
+# This is good: if we had to lock in the callbacks we'd drastically slow down
+# the performance of this code.
+_connection_refs = weakref.WeakValueDictionary()
+_connection_ref_lock = threading.Lock()
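+
+# Hedged sketch of the insertion pattern used later in this module when a
+# handshake registers its socket (the modulus keeps the handle in a 32-bit
+# signed range for the C side):
+#
+#     with _connection_ref_lock:
+#         handle = id(wrapped_socket) % 2147483647
+#         while handle in _connection_refs:
+#             handle = (handle + 1) % 2147483647
+#         _connection_refs[handle] = wrapped_socket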
+
+# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
+# for no better reason than we need *a* limit, and this one is right there.
+SSL_WRITE_BLOCKSIZE = 16384
+
+# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
+# individual cipher suites. We need to do this because this is how
+# SecureTransport wants them.
+CIPHER_SUITES = [
+    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
+    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
+    SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
+    SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
+    SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
+    SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
+    SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
+    SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
+    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
+    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
+    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
+    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
+    SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
+    SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
+    SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
+    SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
+    SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
+    SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
+    SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
+    SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
+    SecurityConst.TLS_AES_256_GCM_SHA384,
+    SecurityConst.TLS_AES_128_GCM_SHA256,
+    SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
+    SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
+    SecurityConst.TLS_AES_128_CCM_8_SHA256,
+    SecurityConst.TLS_AES_128_CCM_SHA256,
+    SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
+    SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
+    SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
+    SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
+]
+
+# Basically this is simple: for PROTOCOL_TLS (the PROTOCOL_SSLv23 alias) we
+# turn it into a low of TLSv1 and a high of TLSv1.2. For everything else, we
+# pin to that version. TLSv1 to TLSv1.2 are supported on macOS 10.8+.
+_protocol_to_min_max = {
+    util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12)
+}
+
+if hasattr(ssl, "PROTOCOL_SSLv2"):
+    _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
+        SecurityConst.kSSLProtocol2,
+        SecurityConst.kSSLProtocol2,
+    )
+if hasattr(ssl, "PROTOCOL_SSLv3"):
+    _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
+        SecurityConst.kSSLProtocol3,
+        SecurityConst.kSSLProtocol3,
+    )
+if hasattr(ssl, "PROTOCOL_TLSv1"):
+    _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
+        SecurityConst.kTLSProtocol1,
+        SecurityConst.kTLSProtocol1,
+    )
+if hasattr(ssl, "PROTOCOL_TLSv1_1"):
+    _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
+        SecurityConst.kTLSProtocol11,
+        SecurityConst.kTLSProtocol11,
+    )
+if hasattr(ssl, "PROTOCOL_TLSv1_2"):
+    _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
+        SecurityConst.kTLSProtocol12,
+        SecurityConst.kTLSProtocol12,
+    )
+
+
+def inject_into_urllib3():
+    """
+    Monkey-patch urllib3 with SecureTransport-backed SSL-support.
+    """
+    util.SSLContext = SecureTransportContext
+    util.ssl_.SSLContext = SecureTransportContext
+    util.HAS_SNI = HAS_SNI
+    util.ssl_.HAS_SNI = HAS_SNI
+    util.IS_SECURETRANSPORT = True
+    util.ssl_.IS_SECURETRANSPORT = True
+
+
+def extract_from_urllib3():
+    """
+    Undo monkey-patching by :func:`inject_into_urllib3`.
+    """
+    util.SSLContext = orig_util_SSLContext
+    util.ssl_.SSLContext = orig_util_SSLContext
+    util.HAS_SNI = orig_util_HAS_SNI
+    util.ssl_.HAS_SNI = orig_util_HAS_SNI
+    util.IS_SECURETRANSPORT = False
+    util.ssl_.IS_SECURETRANSPORT = False
+
+
+def _read_callback(connection_id, data_buffer, data_length_pointer):
+    """
+    SecureTransport read callback. This is called by ST to request that data
+    be returned from the socket.
+    """
+    wrapped_socket = None
+    try:
+        wrapped_socket = _connection_refs.get(connection_id)
+        if wrapped_socket is None:
+            return SecurityConst.errSSLInternal
+        base_socket = wrapped_socket.socket
+
+        requested_length = data_length_pointer[0]
+
+        timeout = wrapped_socket.gettimeout()
+        error = None
+        read_count = 0
+
+        try:
+            while read_count < requested_length:
+                if timeout is None or timeout >= 0:
+                    if not util.wait_for_read(base_socket, timeout):
+                        raise socket.error(errno.EAGAIN, "timed out")
+
+                remaining = requested_length - read_count
+                buffer = (ctypes.c_char * remaining).from_address(
+                    data_buffer + read_count
+                )
+                chunk_size = base_socket.recv_into(buffer, remaining)
+                read_count += chunk_size
+                if not chunk_size:
+                    if not read_count:
+                        return SecurityConst.errSSLClosedGraceful
+                    break
+        except socket.error as e:
+            error = e.errno
+
+            if error is not None and error != errno.EAGAIN:
+                data_length_pointer[0] = read_count
+                if error == errno.ECONNRESET or error == errno.EPIPE:
+                    return SecurityConst.errSSLClosedAbort
+                raise
+
+        data_length_pointer[0] = read_count
+
+        if read_count != requested_length:
+            return SecurityConst.errSSLWouldBlock
+
+        return 0
+    except Exception as e:
+        if wrapped_socket is not None:
+            wrapped_socket._exception = e
+        return SecurityConst.errSSLInternal
+
+
+def _write_callback(connection_id, data_buffer, data_length_pointer):
+    """
+    SecureTransport write callback. This is called by ST to request that data
+    actually be sent on the network.
+    """
+    wrapped_socket = None
+    try:
+        wrapped_socket = _connection_refs.get(connection_id)
+        if wrapped_socket is None:
+            return SecurityConst.errSSLInternal
+        base_socket = wrapped_socket.socket
+
+        bytes_to_write = data_length_pointer[0]
+        data = ctypes.string_at(data_buffer, bytes_to_write)
+
+        timeout = wrapped_socket.gettimeout()
+        error = None
+        sent = 0
+
+        try:
+            while sent < bytes_to_write:
+                if timeout is None or timeout >= 0:
+                    if not util.wait_for_write(base_socket, timeout):
+                        raise socket.error(errno.EAGAIN, "timed out")
+                chunk_sent = base_socket.send(data)
+                sent += chunk_sent
+
+                # This has some needless copying here, but I'm not sure there's
+                # much value in optimising this data path.
+                data = data[chunk_sent:]
+        except socket.error as e:
+            error = e.errno
+
+            if error is not None and error != errno.EAGAIN:
+                data_length_pointer[0] = sent
+                if error == errno.ECONNRESET or error == errno.EPIPE:
+                    return SecurityConst.errSSLClosedAbort
+                raise
+
+        data_length_pointer[0] = sent
+
+        if sent != bytes_to_write:
+            return SecurityConst.errSSLWouldBlock
+
+        return 0
+    except Exception as e:
+        if wrapped_socket is not None:
+            wrapped_socket._exception = e
+        return SecurityConst.errSSLInternal
+
+
+# We need to keep references to these two objects alive: if they get GC'd
+# while in use then SecureTransport could attempt to call a function that is
+# in freed memory. That would be...uh...bad. Yeah, that's the word. Bad.
+_read_callback_pointer = Security.SSLReadFunc(_read_callback)
+_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
+
+
+class WrappedSocket(object):
+    """
+    API-compatibility wrapper for Python's OpenSSL wrapped socket object.
+
+    Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
+    collector of PyPy.
+    """
+
+    def __init__(self, socket):
+        self.socket = socket
+        self.context = None
+        self._makefile_refs = 0
+        self._closed = False
+        self._exception = None
+        self._keychain = None
+        self._keychain_dir = None
+        self._client_cert_chain = None
+
+        # We save off the previously-configured timeout and then set it to
+        # zero. This is done because we use select and friends to handle the
+        # timeouts, but if we leave the timeout set on the lower socket then
+        # Python will "kindly" call select on that socket again for us. Avoid
+        # that by forcing the timeout to zero.
+        self._timeout = self.socket.gettimeout()
+        self.socket.settimeout(0)
+
+    @contextlib.contextmanager
+    def _raise_on_error(self):
+        """
+        A context manager that can be used to wrap calls that do I/O from
+        SecureTransport. If any of the I/O callbacks hit an exception, this
+        context manager will correctly propagate the exception after the fact.
+        This avoids silently swallowing those exceptions.
+
+        It also correctly forces the socket closed.
+        """
+        self._exception = None
+
+        # We explicitly don't catch around this yield because in the unlikely
+        # event that an exception was hit in the block we don't want to swallow
+        # it.
+        yield
+        if self._exception is not None:
+            exception, self._exception = self._exception, None
+            self.close()
+            raise exception
+
+    def _set_ciphers(self):
+        """
+        Sets up the allowed ciphers. By default this matches the set in
+        util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This list
+        is hard-coded and cannot be changed at this time, mostly because
+        parsing OpenSSL cipher strings is going to be a freaking nightmare.
+        """
+        ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
+        result = Security.SSLSetEnabledCiphers(
+            self.context, ciphers, len(CIPHER_SUITES)
+        )
+        _assert_no_error(result)
+
+    def _set_alpn_protocols(self, protocols):
+        """
+        Sets up the ALPN protocols on the context.
+        """
+        if not protocols:
+            return
+        protocols_arr = _create_cfstring_array(protocols)
+        try:
+            result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
+            _assert_no_error(result)
+        finally:
+            CoreFoundation.CFRelease(protocols_arr)
+
+    def _custom_validate(self, verify, trust_bundle):
+        """
+        Called when we have set custom validation. We do this in two cases:
+        first, when cert validation is entirely disabled; and second, when
+        using a custom trust DB.
+        Raises an SSLError if the connection is not trusted.
+        """
+        # If we disabled cert validation, just say: cool.
+        if not verify:
+            return
+
+        successes = (
+            SecurityConst.kSecTrustResultUnspecified,
+            SecurityConst.kSecTrustResultProceed,
+        )
+        try:
+            trust_result = self._evaluate_trust(trust_bundle)
+            if trust_result in successes:
+                return
+            reason = "error code: %d" % (trust_result,)
+        except Exception as e:
+            # Do not trust on error
+            reason = "exception: %r" % (e,)
+
+        # SecureTransport does not send an alert, nor does it shut down the
+        # connection, so we do both ourselves.
+        rec = _build_tls_unknown_ca_alert(self.version())
+        self.socket.sendall(rec)
+        # Close the connection immediately:
+        # l_onoff = 1, activate linger
+        # l_linger = 0, linger for 0 seconds
+        opts = struct.pack("ii", 1, 0)
+        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
+        self.close()
+        raise ssl.SSLError("certificate verify failed, %s" % reason)
+
+    def _evaluate_trust(self, trust_bundle):
+        # We want data in memory, so load it up.
+        if os.path.isfile(trust_bundle):
+            with open(trust_bundle, "rb") as f:
+                trust_bundle = f.read()
+
+        cert_array = None
+        trust = Security.SecTrustRef()
+
+        try:
+            # Get a CFArray that contains the certs we want.
+            cert_array = _cert_array_from_pem(trust_bundle)
+
+            # Ok, now the hard part. We want to get the SecTrustRef that ST has
+            # created for this connection, shove our CAs into it, tell ST to
+            # ignore everything else it knows, and then ask if it can build a
+            # chain. This is a buuuunch of code.
+            result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
+            _assert_no_error(result)
+            if not trust:
+                raise ssl.SSLError("Failed to copy trust reference")
+
+            result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
+            _assert_no_error(result)
+
+            result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
+            _assert_no_error(result)
+
+            trust_result = Security.SecTrustResultType()
+            result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
+            _assert_no_error(result)
+        finally:
+            if trust:
+                CoreFoundation.CFRelease(trust)
+
+            if cert_array is not None:
+                CoreFoundation.CFRelease(cert_array)
+
+        return trust_result.value
+
+    def handshake(
+        self,
+        server_hostname,
+        verify,
+        trust_bundle,
+        min_version,
+        max_version,
+        client_cert,
+        client_key,
+        client_key_passphrase,
+        alpn_protocols,
+    ):
+        """
+        Actually performs the TLS handshake. This is run automatically by the
+        wrapped socket, and shouldn't be needed in user code.
+        """
+        # First, we do the initial bits of connection setup. We need to create
+        # a context, set its I/O funcs, and set the connection reference.
+        self.context = Security.SSLCreateContext(
+            None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
+        )
+        result = Security.SSLSetIOFuncs(
+            self.context, _read_callback_pointer, _write_callback_pointer
+        )
+        _assert_no_error(result)
+
+        # Here we need to compute the handle to use. We do this by taking the
+        # id of self modulo 2**31 - 1. If this is already in the dictionary, we
+        # just keep incrementing by one until we find a free space.
+        with _connection_ref_lock:
+            handle = id(self) % 2147483647
+            while handle in _connection_refs:
+                handle = (handle + 1) % 2147483647
+            _connection_refs[handle] = self
+
+        result = Security.SSLSetConnection(self.context, handle)
+        _assert_no_error(result)
+
+        # If we have a server hostname, we should set that too.
+        if server_hostname:
+            if not isinstance(server_hostname, bytes):
+                server_hostname = server_hostname.encode("utf-8")
+
+            result = Security.SSLSetPeerDomainName(
+                self.context, server_hostname, len(server_hostname)
+            )
+            _assert_no_error(result)
+
+        # Set up the ciphers.
+        self._set_ciphers()
+
+        # Set up the ALPN protocols.
+        self._set_alpn_protocols(alpn_protocols)
+
+        # Set the minimum and maximum TLS versions.
+        result = Security.SSLSetProtocolVersionMin(self.context, min_version)
+        _assert_no_error(result)
+
+        result = Security.SSLSetProtocolVersionMax(self.context, max_version)
+        _assert_no_error(result)
+
+        # If there's a trust DB, we need to use it. We do that by telling
+        # SecureTransport to break on server auth. We also do that if we don't
+        # want to validate the certs at all: we just won't actually do any
+        # authing in that case.
+        if not verify or trust_bundle is not None:
+            result = Security.SSLSetSessionOption(
+                self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
+            )
+            _assert_no_error(result)
+
+        # If there's a client cert, we need to use it.
+        if client_cert:
+            self._keychain, self._keychain_dir = _temporary_keychain()
+            self._client_cert_chain = _load_client_cert_chain(
+                self._keychain, client_cert, client_key
+            )
+            result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
+            _assert_no_error(result)
+
+        while True:
+            with self._raise_on_error():
+                result = Security.SSLHandshake(self.context)
+
+                if result == SecurityConst.errSSLWouldBlock:
+                    raise socket.timeout("handshake timed out")
+                elif result == SecurityConst.errSSLServerAuthCompleted:
+                    self._custom_validate(verify, trust_bundle)
+                    continue
+                else:
+                    _assert_no_error(result)
+                    break
+
+    def fileno(self):
+        return self.socket.fileno()
+
+    # Copy-pasted from Python 3.5 source code
+    def _decref_socketios(self):
+        if self._makefile_refs > 0:
+            self._makefile_refs -= 1
+        if self._closed:
+            self.close()
+
+    def recv(self, bufsiz):
+        buffer = ctypes.create_string_buffer(bufsiz)
+        bytes_read = self.recv_into(buffer, bufsiz)
+        data = buffer[:bytes_read]
+        return data
+
+    def recv_into(self, buffer, nbytes=None):
+        # Read short on EOF.
+        if self._closed:
+            return 0
+
+        if nbytes is None:
+            nbytes = len(buffer)
+
+        buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
+        processed_bytes = ctypes.c_size_t(0)
+
+        with self._raise_on_error():
+            result = Security.SSLRead(
+                self.context, buffer, nbytes, ctypes.byref(processed_bytes)
+            )
+
+        # There are some result codes that we want to treat as "not always
+        # errors". Specifically, those are errSSLWouldBlock,
+        # errSSLClosedGraceful, and errSSLClosedNoNotify.
+        if result == SecurityConst.errSSLWouldBlock:
+            # If we didn't process any bytes, then this was just a time out.
+            # However, we can get errSSLWouldBlock in situations when we *did*
+            # read some data, and in those cases we should just read "short"
+            # and return.
+            if processed_bytes.value == 0:
+                # Timed out, no data read.
+                raise socket.timeout("recv timed out")
+        elif result in (
+            SecurityConst.errSSLClosedGraceful,
+            SecurityConst.errSSLClosedNoNotify,
+        ):
+            # The remote peer has closed this connection. We should do so as
+            # well. Note that we don't actually return here because in
+            # principle this could be fired along with returned data. It's
+            # unlikely, though.
+            self.close()
+        else:
+            _assert_no_error(result)
+
+        # Ok, we read and probably succeeded. We should return whatever data
+        # was actually read.
+        return processed_bytes.value
+
+    def settimeout(self, timeout):
+        self._timeout = timeout
+
+    def gettimeout(self):
+        return self._timeout
+
+    def send(self, data):
+        processed_bytes = ctypes.c_size_t(0)
+
+        with self._raise_on_error():
+            result = Security.SSLWrite(
+                self.context, data, len(data), ctypes.byref(processed_bytes)
+            )
+
+        if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
+            # Timed out
+            raise socket.timeout("send timed out")
+        else:
+            _assert_no_error(result)
+
+        # We sent, and probably succeeded. Tell them how much we sent.
+        return processed_bytes.value
+
+    def sendall(self, data):
+        total_sent = 0
+        while total_sent < len(data):
+            sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
+            total_sent += sent
+
+    def shutdown(self):
+        with self._raise_on_error():
+            Security.SSLClose(self.context)
+
+    def close(self):
+        # TODO: should I do clean shutdown here? Do I have to?
+        if self._makefile_refs < 1:
+            self._closed = True
+            if self.context:
+                CoreFoundation.CFRelease(self.context)
+                self.context = None
+            if self._client_cert_chain:
+                CoreFoundation.CFRelease(self._client_cert_chain)
+                self._client_cert_chain = None
+            if self._keychain:
+                Security.SecKeychainDelete(self._keychain)
+                CoreFoundation.CFRelease(self._keychain)
+                shutil.rmtree(self._keychain_dir)
+                self._keychain = self._keychain_dir = None
+            return self.socket.close()
+        else:
+            self._makefile_refs -= 1
+
+    def getpeercert(self, binary_form=False):
+        # Urgh, annoying.
+        #
+        # Here's how we do this:
+        #
+        # 1. Call SSLCopyPeerTrust to get hold of the trust object for this
+        #    connection.
+        # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
+        # 3. To get the CN, call SecCertificateCopyCommonName and process that
+        #    string so that it's of the appropriate type.
+        # 4. To get the SAN, we need to do something a bit more complex:
+        #    a. Call SecCertificateCopyValues to get the data, requesting
+        #       kSecOIDSubjectAltName.
+        #    b. Mess about with this dictionary to try to get the SANs out.
+        #
+        # This is gross. Really gross. It's going to be a few hundred LoC extra
+        # just to repeat something that SecureTransport can *already do*. So my
+        # operating assumption at this time is that what we want to do is
+        # instead to just flag to urllib3 that it shouldn't do its own hostname
+        # validation when using SecureTransport.
+        if not binary_form:
+            raise ValueError("SecureTransport only supports dumping binary certs")
+        trust = Security.SecTrustRef()
+        certdata = None
+        der_bytes = None
+
+        try:
+            # Grab the trust store.
+            result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
+            _assert_no_error(result)
+            if not trust:
+                # Probably we haven't done the handshake yet. No biggie.
+                return None
+
+            cert_count = Security.SecTrustGetCertificateCount(trust)
+            if not cert_count:
+                # Also a case that might happen if we haven't handshaked.
+                # Handshook? Handshaken?
+                return None
+
+            leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
+            assert leaf
+
+            # Ok, now we want the DER bytes.
+            certdata = Security.SecCertificateCopyData(leaf)
+            assert certdata
+
+            data_length = CoreFoundation.CFDataGetLength(certdata)
+            data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
+            der_bytes = ctypes.string_at(data_buffer, data_length)
+        finally:
+            if certdata:
+                CoreFoundation.CFRelease(certdata)
+            if trust:
+                CoreFoundation.CFRelease(trust)
+
+        return der_bytes
+
+    def version(self):
+        protocol = Security.SSLProtocol()
+        result = Security.SSLGetNegotiatedProtocolVersion(
+            self.context, ctypes.byref(protocol)
+        )
+        _assert_no_error(result)
+        if protocol.value == SecurityConst.kTLSProtocol13:
+            raise ssl.SSLError("SecureTransport does not support TLS 1.3")
+        elif protocol.value == SecurityConst.kTLSProtocol12:
+            return "TLSv1.2"
+        elif protocol.value == SecurityConst.kTLSProtocol11:
+            return "TLSv1.1"
+        elif protocol.value == SecurityConst.kTLSProtocol1:
+            return "TLSv1"
+        elif protocol.value == SecurityConst.kSSLProtocol3:
+            return "SSLv3"
+        elif protocol.value == SecurityConst.kSSLProtocol2:
+            return "SSLv2"
+        else:
+            raise ssl.SSLError("Unknown TLS version: %r" % protocol)
+
+    def _reuse(self):
+        self._makefile_refs += 1
+
+    def _drop(self):
+        if self._makefile_refs < 1:
+            self.close()
+        else:
+            self._makefile_refs -= 1
+
+
+if _fileobject:  # Platform-specific: Python 2
+
+    def makefile(self, mode, bufsize=-1):
+        self._makefile_refs += 1
+        return _fileobject(self, mode, bufsize, close=True)
+
+
+else:  # Platform-specific: Python 3
+
+    def makefile(self, mode="r", buffering=None, *args, **kwargs):
+        # We disable buffering with SecureTransport because it conflicts with
+        # the buffering that ST does internally (see issue #1153 for more).
+        buffering = 0
+        return backport_makefile(self, mode, buffering, *args, **kwargs)
+
+
+WrappedSocket.makefile = makefile
+
+
+class SecureTransportContext(object):
+    """
+    I am a wrapper class for the SecureTransport library, to translate the
+    interface of the standard library ``SSLContext`` object to calls into
+    SecureTransport.
+    """
+
+    def __init__(self, protocol):
+        self._min_version, self._max_version = _protocol_to_min_max[protocol]
+        self._options = 0
+        self._verify = False
+        self._trust_bundle = None
+        self._client_cert = None
+        self._client_key = None
+        self._client_key_passphrase = None
+        self._alpn_protocols = None
+
+    @property
+    def check_hostname(self):
+        """
+        SecureTransport cannot have its hostname checking disabled. For more,
+        see the comment on getpeercert() in this file.
+        """
+        return True
+
+    @check_hostname.setter
+    def check_hostname(self, value):
+        """
+        SecureTransport cannot have its hostname checking disabled. For more,
+        see the comment on getpeercert() in this file.
+        """
+        pass
+
+    @property
+    def options(self):
+        # TODO: Well, crap.
+        #
+        # So this is the bit of the code that is the most likely to cause us
+        # trouble. Essentially we need to enumerate all of the SSL options that
+        # users might want to use and try to see if we can sensibly translate
+        # them, or whether we should just ignore them.
+        return self._options
+
+    @options.setter
+    def options(self, value):
+        # TODO: Update in line with above.
+        self._options = value
+
+    @property
+    def verify_mode(self):
+        return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
+
+    @verify_mode.setter
+    def verify_mode(self, value):
+        self._verify = True if value == ssl.CERT_REQUIRED else False
+
+    def set_default_verify_paths(self):
+        # So, this has to do something a bit weird. Specifically, what it does
+        # is nothing.
+        #
+        # This means that, if we had previously had load_verify_locations
+        # called, this does not undo that. We need to do that because it turns
+        # out that the rest of the urllib3 code will attempt to load the
+        # default verify paths if it hasn't been told about any paths, even if
+        # the context itself was given paths sometime earlier. We resolve that
+        # by just ignoring it.
+        pass
+
+    def load_default_certs(self):
+        return self.set_default_verify_paths()
+
+    def set_ciphers(self, ciphers):
+        # For now, we just require the default cipher string.
+        if ciphers != util.ssl_.DEFAULT_CIPHERS:
+            raise ValueError("SecureTransport doesn't support custom cipher strings")
+
+    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+        # OK, we only really support cadata and cafile.
+        if capath is not None:
+            raise ValueError("SecureTransport does not support cert directories")
+
+        # Raise if cafile does not exist.
+        if cafile is not None:
+            with open(cafile):
+                pass
+
+        self._trust_bundle = cafile or cadata
+
+    def load_cert_chain(self, certfile, keyfile=None, password=None):
+        self._client_cert = certfile
+        self._client_key = keyfile
+        self._client_key_passphrase = password
+
+    def set_alpn_protocols(self, protocols):
+        """
+        Sets the ALPN protocols that will later be set on the context.
+
+        Raises a NotImplementedError if ALPN is not supported.
+        """
+        if not hasattr(Security, "SSLSetALPNProtocols"):
+            raise NotImplementedError(
+                "SecureTransport supports ALPN only in macOS 10.12+"
+            )
+        self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
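+
+    # Illustrative sketch (not part of the module): this mirrors the standard
+    # ssl.SSLContext.set_alpn_protocols API, e.g.
+    #
+    #     ctx = SecureTransportContext(ssl.PROTOCOL_TLS)
+    #     ctx.set_alpn_protocols(["h2", "http/1.1"])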
+
+    def wrap_socket(
+        self,
+        sock,
+        server_side=False,
+        do_handshake_on_connect=True,
+        suppress_ragged_eofs=True,
+        server_hostname=None,
+    ):
+        # So, what do we do here? Firstly, we assert some properties. This is a
+        # stripped down shim, so there is some functionality we don't support.
+        # See PEP 543 for the real deal.
+        assert not server_side
+        assert do_handshake_on_connect
+        assert suppress_ragged_eofs
+
+        # Ok, we're good to go. Now we want to create the wrapped socket object
+        # and store it in the appropriate place.
+        wrapped_socket = WrappedSocket(sock)
+
+        # Now we can handshake
+        wrapped_socket.handshake(
+            server_hostname,
+            self._verify,
+            self._trust_bundle,
+            self._min_version,
+            self._max_version,
+            self._client_cert,
+            self._client_key,
+            self._client_key_passphrase,
+            self._alpn_protocols,
+        )
+        return wrapped_socket
diff --git a/venv/lib/python3.7/site-packages/urllib3/contrib/socks.py b/venv/lib/python3.7/site-packages/urllib3/contrib/socks.py
new file mode 100644
index 00000000..93df8325
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/contrib/socks.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+"""
+This module contains provisional support for SOCKS proxies from within
+urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
+SOCKS5. To enable its functionality, either install PySocks or install this
+module with the ``socks`` extra.
+
+The SOCKS implementation supports the full range of urllib3 features. It also
+supports the following SOCKS features:
+
+- SOCKS4A (``proxy_url='socks4a://...'``)
+- SOCKS4 (``proxy_url='socks4://...'``)
+- SOCKS5 with remote DNS (``proxy_url='socks5h://...'``)
+- SOCKS5 with local DNS (``proxy_url='socks5://...'``)
+- Usernames and passwords for the SOCKS proxy
+
+.. note::
+   It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
+   your ``proxy_url`` to ensure that DNS resolution is done from the remote
+   server instead of client-side when connecting to a domain name.
+
+SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
+supports IPv4, IPv6, and domain names.
+
+When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
+will be sent as the ``userid`` section of the SOCKS request:
+
+.. code-block:: python
+
+    proxy_url="socks4a://<userid>@proxy-host"
+
+When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
+of the ``proxy_url`` will be sent as the username/password to authenticate
+with the proxy:
+
+.. code-block:: python
+
+    proxy_url="socks5h://<username>:<password>@proxy-host"
+
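+A minimal usage sketch (the proxy address here is only an illustration):
+
+.. code-block:: python
+
+    from urllib3.contrib.socks import SOCKSProxyManager
+
+    proxy = SOCKSProxyManager("socks5h://localhost:1080/")
+    response = proxy.request("GET", "https://example.org/")
+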
+"""
+from __future__ import absolute_import
+
+try:
+    import socks
+except ImportError:
+    import warnings
+
+    from ..exceptions import DependencyWarning
+
+    warnings.warn(
+        (
+            "SOCKS support in urllib3 requires the installation of optional "
+            "dependencies: specifically, PySocks.  For more information, see "
+            "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies"
+        ),
+        DependencyWarning,
+    )
+    raise
+
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+from ..connection import HTTPConnection, HTTPSConnection
+from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from ..exceptions import ConnectTimeoutError, NewConnectionError
+from ..poolmanager import PoolManager
+from ..util.url import parse_url
+
+try:
+    import ssl
+except ImportError:
+    ssl = None
+
+
+class SOCKSConnection(HTTPConnection):
+    """
+    A plain-text HTTP connection that connects via a SOCKS proxy.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self._socks_options = kwargs.pop("_socks_options")
+        super(SOCKSConnection, self).__init__(*args, **kwargs)
+
+    def _new_conn(self):
+        """
+        Establish a new connection via the SOCKS proxy.
+        """
+        extra_kw = {}
+        if self.source_address:
+            extra_kw["source_address"] = self.source_address
+
+        if self.socket_options:
+            extra_kw["socket_options"] = self.socket_options
+
+        try:
+            conn = socks.create_connection(
+                (self.host, self.port),
+                proxy_type=self._socks_options["socks_version"],
+                proxy_addr=self._socks_options["proxy_host"],
+                proxy_port=self._socks_options["proxy_port"],
+                proxy_username=self._socks_options["username"],
+                proxy_password=self._socks_options["password"],
+                proxy_rdns=self._socks_options["rdns"],
+                timeout=self.timeout,
+                **extra_kw
+            )
+
+        except SocketTimeout:
+            raise ConnectTimeoutError(
+                self,
+                "Connection to %s timed out. (connect timeout=%s)"
+                % (self.host, self.timeout),
+            )
+
+        except socks.ProxyError as e:
+            # This is fragile as hell, but it seems to be the only way to raise
+            # useful errors here.
+            if e.socket_err:
+                error = e.socket_err
+                if isinstance(error, SocketTimeout):
+                    raise ConnectTimeoutError(
+                        self,
+                        "Connection to %s timed out. (connect timeout=%s)"
+                        % (self.host, self.timeout),
+                    )
+                else:
+                    raise NewConnectionError(
+                        self, "Failed to establish a new connection: %s" % error
+                    )
+            else:
+                raise NewConnectionError(
+                    self, "Failed to establish a new connection: %s" % e
+                )
+
+        except SocketError as e:  # Defensive: PySocks should catch all these.
+            raise NewConnectionError(
+                self, "Failed to establish a new connection: %s" % e
+            )
+
+        return conn
+
+
+# We don't need to duplicate the Verified/Unverified distinction from
+# urllib3/connection.py here because the HTTPSConnection will already have been
+# correctly set to either the Verified or Unverified form by that module. This
+# means the SOCKSHTTPSConnection will automatically be the correct type.
+class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
+    pass
+
+
+class SOCKSHTTPConnectionPool(HTTPConnectionPool):
+    ConnectionCls = SOCKSConnection
+
+
+class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
+    ConnectionCls = SOCKSHTTPSConnection
+
+
+class SOCKSProxyManager(PoolManager):
+    """
+    A version of the urllib3 ProxyManager that routes connections via the
+    defined SOCKS proxy.
+    """
+
+    pool_classes_by_scheme = {
+        "http": SOCKSHTTPConnectionPool,
+        "https": SOCKSHTTPSConnectionPool,
+    }
+
+    def __init__(
+        self,
+        proxy_url,
+        username=None,
+        password=None,
+        num_pools=10,
+        headers=None,
+        **connection_pool_kw
+    ):
+        parsed = parse_url(proxy_url)
+
+        if username is None and password is None and parsed.auth is not None:
+            split = parsed.auth.split(":")
+            if len(split) == 2:
+                username, password = split
+        if parsed.scheme == "socks5":
+            socks_version = socks.PROXY_TYPE_SOCKS5
+            rdns = False
+        elif parsed.scheme == "socks5h":
+            socks_version = socks.PROXY_TYPE_SOCKS5
+            rdns = True
+        elif parsed.scheme == "socks4":
+            socks_version = socks.PROXY_TYPE_SOCKS4
+            rdns = False
+        elif parsed.scheme == "socks4a":
+            socks_version = socks.PROXY_TYPE_SOCKS4
+            rdns = True
+        else:
+            raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)
+
+        self.proxy_url = proxy_url
+
+        socks_options = {
+            "socks_version": socks_version,
+            "proxy_host": parsed.host,
+            "proxy_port": parsed.port,
+            "username": username,
+            "password": password,
+            "rdns": rdns,
+        }
+        connection_pool_kw["_socks_options"] = socks_options
+
+        super(SOCKSProxyManager, self).__init__(
+            num_pools, headers, **connection_pool_kw
+        )
+
+        self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
diff --git a/venv/lib/python3.7/site-packages/urllib3/exceptions.py b/venv/lib/python3.7/site-packages/urllib3/exceptions.py
new file mode 100644
index 00000000..cba6f3f5
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/exceptions.py
@@ -0,0 +1,323 @@
+from __future__ import absolute_import
+
+from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
+
+# Base Exceptions
+
+
+class HTTPError(Exception):
+    """Base exception used by this module."""
+
+    pass
+
+
+class HTTPWarning(Warning):
+    """Base warning used by this module."""
+
+    pass
+
+
+class PoolError(HTTPError):
+    """Base exception for errors caused within a pool."""
+
+    def __init__(self, pool, message):
+        self.pool = pool
+        HTTPError.__init__(self, "%s: %s" % (pool, message))
+
+    def __reduce__(self):
+        # For pickling purposes.
+        return self.__class__, (None, None)
+
+
+class RequestError(PoolError):
+    """Base exception for PoolErrors that have associated URLs."""
+
+    def __init__(self, pool, url, message):
+        self.url = url
+        PoolError.__init__(self, pool, message)
+
+    def __reduce__(self):
+        # For pickling purposes.
+        return self.__class__, (None, self.url, None)
+
+
+class SSLError(HTTPError):
+    """Raised when SSL certificate fails in an HTTPS connection."""
+
+    pass
+
+
+class ProxyError(HTTPError):
+    """Raised when the connection to a proxy fails."""
+
+    def __init__(self, message, error, *args):
+        super(ProxyError, self).__init__(message, error, *args)
+        self.original_error = error
+
+
+class DecodeError(HTTPError):
+    """Raised when automatic decoding based on Content-Type fails."""
+
+    pass
+
+
+class ProtocolError(HTTPError):
+    """Raised when something unexpected happens mid-request/response."""
+
+    pass
+
+
+#: Renamed to ProtocolError but aliased for backwards compatibility.
+ConnectionError = ProtocolError
+
+
+# Leaf Exceptions
+
+
+class MaxRetryError(RequestError):
+    """Raised when the maximum number of retries is exceeded.
+
+    :param pool: The connection pool
+    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+    :param string url: The requested URL
+    :param exceptions.Exception reason: The underlying error
+
+    """
+
+    def __init__(self, pool, url, reason=None):
+        self.reason = reason
+
+        message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
+
+        RequestError.__init__(self, pool, url, message)
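+
+# Illustrative sketch (not part of the module): the underlying cause is
+# exposed as ``reason``; ``pool`` is a hypothetical HTTPConnectionPool.
+#
+#     try:
+#         pool.urlopen("GET", "/")
+#     except MaxRetryError as e:
+#         print("Gave up: %r" % e.reason)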
+
+
+class HostChangedError(RequestError):
+    """Raised when an existing pool gets a request for a foreign host."""
+
+    def __init__(self, pool, url, retries=3):
+        message = "Tried to open a foreign host with url: %s" % url
+        RequestError.__init__(self, pool, url, message)
+        self.retries = retries
+
+
+class TimeoutStateError(HTTPError):
+    """Raised when passing an invalid state to a timeout"""
+
+    pass
+
+
+class TimeoutError(HTTPError):
+    """Raised when a socket timeout error occurs.
+
+    Catching this error will catch both :exc:`ReadTimeoutErrors
+    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+    """
+
+    pass
+
+
+class ReadTimeoutError(TimeoutError, RequestError):
+    """Raised when a socket timeout occurs while receiving data from a server"""
+
+    pass
+
+
+# This timeout error does not have a URL attached and needs to inherit from the
+# base HTTPError
+class ConnectTimeoutError(TimeoutError):
+    """Raised when a socket timeout occurs while connecting to a server"""
+
+    pass
+
+
+class NewConnectionError(ConnectTimeoutError, PoolError):
+    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
+
+    pass
+
+
+class EmptyPoolError(PoolError):
+    """Raised when a pool runs out of connections and no more are allowed."""
+
+    pass
+
+
+class ClosedPoolError(PoolError):
+    """Raised when a request enters a pool after the pool has been closed."""
+
+    pass
+
+
+class LocationValueError(ValueError, HTTPError):
+    """Raised when there is something wrong with a given URL input."""
+
+    pass
+
+
+class LocationParseError(LocationValueError):
+    """Raised when get_host or similar fails to parse the URL input."""
+
+    def __init__(self, location):
+        message = "Failed to parse: %s" % location
+        HTTPError.__init__(self, message)
+
+        self.location = location
+
+
+class URLSchemeUnknown(LocationValueError):
+    """Raised when a URL input has an unsupported scheme."""
+
+    def __init__(self, scheme):
+        message = "Not supported URL scheme %s" % scheme
+        super(URLSchemeUnknown, self).__init__(message)
+
+        self.scheme = scheme
+
+
+class ResponseError(HTTPError):
+    """Used as a container for an error reason supplied in a MaxRetryError."""
+
+    GENERIC_ERROR = "too many error responses"
+    SPECIFIC_ERROR = "too many {status_code} error responses"
+
+
+class SecurityWarning(HTTPWarning):
+    """Warned when performing security reducing actions"""
+
+    pass
+
+
+class SubjectAltNameWarning(SecurityWarning):
+    """Warned when connecting to a host with a certificate missing a SAN."""
+
+    pass
+
+
+class InsecureRequestWarning(SecurityWarning):
+    """Warned when making an unverified HTTPS request."""
+
+    pass
+
+
+class SystemTimeWarning(SecurityWarning):
+    """Warned when system time is suspected to be wrong"""
+
+    pass
+
+
+class InsecurePlatformWarning(SecurityWarning):
+    """Warned when certain TLS/SSL configuration is not available on a platform."""
+
+    pass
+
+
+class SNIMissingWarning(HTTPWarning):
+    """Warned when making a HTTPS request without SNI available."""
+
+    pass
+
+
+class DependencyWarning(HTTPWarning):
+    """
+    Warned when an attempt is made to import a module with missing optional
+    dependencies.
+    """
+
+    pass
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+    """Response needs to be chunked in order to read it as chunks."""
+
+    pass
+
+
+class BodyNotHttplibCompatible(HTTPError):
+    """
+    Body should be :class:`http.client.HTTPResponse` like
+    (have an fp attribute which returns raw chunks) for read_chunked().
+    """
+
+    pass
+
+
+class IncompleteRead(HTTPError, httplib_IncompleteRead):
+    """
+    Response length doesn't match expected Content-Length
+
+    Subclass of :class:`http.client.IncompleteRead` to allow int value
+    for ``partial`` to avoid creating large objects on streamed reads.
+    """
+
+    def __init__(self, partial, expected):
+        super(IncompleteRead, self).__init__(partial, expected)
+
+    def __repr__(self):
+        return "IncompleteRead(%i bytes read, %i more expected)" % (
+            self.partial,
+            self.expected,
+        )
+
+
+class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
+    """Invalid chunk length in a chunked response."""
+
+    def __init__(self, response, length):
+        super(InvalidChunkLength, self).__init__(
+            response.tell(), response.length_remaining
+        )
+        self.response = response
+        self.length = length
+
+    def __repr__(self):
+        return "InvalidChunkLength(got length %r, %i bytes read)" % (
+            self.length,
+            self.partial,
+        )
+
+
+class InvalidHeader(HTTPError):
+    """The header provided was somehow invalid."""
+
+    pass
+
+
+class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
+    """ProxyManager does not support the supplied scheme"""
+
+    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
+
+    def __init__(self, scheme):
+        # 'localhost' is here because our URL parser parses
+        # localhost:8080 -> scheme=localhost, remove if we fix this.
+        if scheme == "localhost":
+            scheme = None
+        if scheme is None:
+            message = "Proxy URL had no scheme, should start with http:// or https://"
+        else:
+            message = (
+                "Proxy URL had unsupported scheme %s, should use http:// or https://"
+                % scheme
+            )
+        super(ProxySchemeUnknown, self).__init__(message)
+
+
+class ProxySchemeUnsupported(ValueError):
+    """Fetching HTTPS resources through HTTPS proxies is unsupported"""
+
+    pass
+
+
+class HeaderParsingError(HTTPError):
+    """Raised by assert_header_parsing, but we convert it to a log.warning statement."""
+
+    def __init__(self, defects, unparsed_data):
+        message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
+        super(HeaderParsingError, self).__init__(message)
+
+
+class UnrewindableBodyError(HTTPError):
+    """urllib3 encountered an error when trying to rewind a body"""
+
+    pass
diff --git a/venv/lib/python3.7/site-packages/urllib3/fields.py b/venv/lib/python3.7/site-packages/urllib3/fields.py
new file mode 100644
index 00000000..9d630f49
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/fields.py
@@ -0,0 +1,274 @@
+from __future__ import absolute_import
+
+import email.utils
+import mimetypes
+import re
+
+from .packages import six
+
+
+def guess_content_type(filename, default="application/octet-stream"):
+    """
+    Guess the "Content-Type" of a file.
+
+    :param filename:
+        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
+    :param default:
+        If no "Content-Type" can be guessed, default to `default`.
+    """
+    if filename:
+        return mimetypes.guess_type(filename)[0] or default
+    return default
+
+
+def format_header_param_rfc2231(name, value):
+    """
+    Helper function to format and quote a single header parameter using the
+    strategy defined in RFC 2231.
+
+    Particularly useful for header parameters which might contain
+    non-ASCII values, like file names. This follows
+    `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
+
+    :param name:
+        The name of the parameter, a string expected to be ASCII only.
+    :param value:
+        The value of the parameter, provided as ``bytes`` or ``str``.
+    :ret:
+        An RFC-2231-formatted unicode string.
+    """
+    if isinstance(value, six.binary_type):
+        value = value.decode("utf-8")
+
+    if not any(ch in value for ch in '"\\\r\n'):
+        result = u'%s="%s"' % (name, value)
+        try:
+            result.encode("ascii")
+        except (UnicodeEncodeError, UnicodeDecodeError):
+            pass
+        else:
+            return result
+
+    if six.PY2:  # Python 2:
+        value = value.encode("utf-8")
+
+    # encode_rfc2231 accepts an encoded string and returns an ascii-encoded
+    # string in Python 2 but accepts and returns unicode strings in Python 3
+    value = email.utils.encode_rfc2231(value, "utf-8")
+    value = "%s*=%s" % (name, value)
+
+    if six.PY2:  # Python 2:
+        value = value.decode("utf-8")
+
+    return value
+
+
+_HTML5_REPLACEMENTS = {
+    u"\u0022": u"%22",
+    # Replace "\" with "\\".
+    u"\u005C": u"\u005C\u005C",
+}
+
+# All control characters from 0x00 to 0x1F *except* 0x1B.
+_HTML5_REPLACEMENTS.update(
+    {
+        six.unichr(cc): u"%{:02X}".format(cc)
+        for cc in range(0x00, 0x1F + 1)
+        if cc not in (0x1B,)
+    }
+)
+
+
+def _replace_multiple(value, needles_and_replacements):
+    def replacer(match):
+        return needles_and_replacements[match.group(0)]
+
+    pattern = re.compile(
+        r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
+    )
+
+    result = pattern.sub(replacer, value)
+
+    return result
+
+
+def format_header_param_html5(name, value):
+    """
+    Helper function to format and quote a single header parameter using the
+    HTML5 strategy.
+
+    Particularly useful for header parameters which might contain
+    non-ASCII values, like file names. This follows the `HTML5 Working Draft
+    Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
+
+    .. _HTML5 Working Draft Section 4.10.22.7:
+        https://w3c.github.io/html/sec-forms.html#multipart-form-data
+
+    :param name:
+        The name of the parameter, a string expected to be ASCII only.
+    :param value:
+        The value of the parameter, provided as ``bytes`` or ``str``.
+    :ret:
+        A unicode string, stripped of troublesome characters.
+    """
+    if isinstance(value, six.binary_type):
+        value = value.decode("utf-8")
+
+    value = _replace_multiple(value, _HTML5_REPLACEMENTS)
+
+    return u'%s="%s"' % (name, value)
+
+
+# For backwards-compatibility.
+format_header_param = format_header_param_html5
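+
+# Illustrative sketch (not part of the module): quoting a filename parameter
+# with the HTML5 strategy; the value is an assumption.
+#
+#     format_header_param_html5(u"filename", u'a "quoted" name.txt')
+#     # -> u'filename="a %22quoted%22 name.txt"'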
+
+
+class RequestField(object):
+    """
+    A data container for request body parameters.
+
+    :param name:
+        The name of this request field. Must be unicode.
+    :param data:
+        The data/value body.
+    :param filename:
+        An optional filename of the request field. Must be unicode.
+    :param headers:
+        An optional dict-like object of headers to initially use for the field.
+    :param header_formatter:
+        An optional callable that is used to encode and format the headers. By
+        default, this is :func:`format_header_param_html5`.
+    """
+
+    def __init__(
+        self,
+        name,
+        data,
+        filename=None,
+        headers=None,
+        header_formatter=format_header_param_html5,
+    ):
+        self._name = name
+        self._filename = filename
+        self.data = data
+        self.headers = {}
+        if headers:
+            self.headers = dict(headers)
+        self.header_formatter = header_formatter
+
+    @classmethod
+    def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
+        """
+        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
+
+        Supports constructing :class:`~urllib3.fields.RequestField` from
+        parameter of key/value strings AND key/filetuple. A filetuple is a
+        (filename, data, MIME type) tuple where the MIME type is optional.
+        For example::
+
+            'foo': 'bar',
+            'fakefile': ('foofile.txt', 'contents of foofile'),
+            'realfile': ('barfile.txt', open('realfile').read()),
+            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
+            'nonamefile': 'contents of nonamefile field',
+
+        Field names and filenames must be unicode.
+        """
+        if isinstance(value, tuple):
+            if len(value) == 3:
+                filename, data, content_type = value
+            else:
+                filename, data = value
+                content_type = guess_content_type(filename)
+        else:
+            filename = None
+            content_type = None
+            data = value
+
+        request_param = cls(
+            fieldname, data, filename=filename, header_formatter=header_formatter
+        )
+        request_param.make_multipart(content_type=content_type)
+
+        return request_param
+
+    def _render_part(self, name, value):
+        """
+        Overridable helper function to format a single header parameter. By
+        default, this calls ``self.header_formatter``.
+
+        :param name:
+            The name of the parameter, a string expected to be ASCII only.
+        :param value:
+            The value of the parameter, provided as a unicode string.
+        """
+
+        return self.header_formatter(name, value)
+
+    def _render_parts(self, header_parts):
+        """
+        Helper function to format and quote a single header.
+
+        Useful for single headers that are composed of multiple items. E.g.,
+        'Content-Disposition' fields.
+
+        :param header_parts:
+            A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
+            as `k1="v1"; k2="v2"; ...`.
+        """
+        parts = []
+        iterable = header_parts
+        if isinstance(header_parts, dict):
+            iterable = header_parts.items()
+
+        for name, value in iterable:
+            if value is not None:
+                parts.append(self._render_part(name, value))
+
+        return u"; ".join(parts)
+
+    def render_headers(self):
+        """
+        Renders the headers for this request field.
+        """
+        lines = []
+
+        sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
+        for sort_key in sort_keys:
+            if self.headers.get(sort_key, False):
+                lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))
+
+        for header_name, header_value in self.headers.items():
+            if header_name not in sort_keys:
+                if header_value:
+                    lines.append(u"%s: %s" % (header_name, header_value))
+
+        lines.append(u"\r\n")
+        return u"\r\n".join(lines)
+
+    def make_multipart(
+        self, content_disposition=None, content_type=None, content_location=None
+    ):
+        """
+        Makes this request field into a multipart request field.
+
+        This method sets the "Content-Disposition", "Content-Type" and
+        "Content-Location" headers on this request field.
+
+        :param content_type:
+            The 'Content-Type' of the request body.
+        :param content_location:
+            The 'Content-Location' of the request body.
+
+        """
+        self.headers["Content-Disposition"] = content_disposition or u"form-data"
+        self.headers["Content-Disposition"] += u"; ".join(
+            [
+                u"",
+                self._render_parts(
+                    ((u"name", self._name), (u"filename", self._filename))
+                ),
+            ]
+        )
+        self.headers["Content-Type"] = content_type
+        self.headers["Content-Location"] = content_location
diff --git a/venv/lib/python3.7/site-packages/urllib3/filepost.py b/venv/lib/python3.7/site-packages/urllib3/filepost.py
new file mode 100644
index 00000000..36c9252c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/filepost.py
@@ -0,0 +1,98 @@
+from __future__ import absolute_import
+
+import binascii
+import codecs
+import os
+from io import BytesIO
+
+from .fields import RequestField
+from .packages import six
+from .packages.six import b
+
+writer = codecs.lookup("utf-8")[3]
+
+
+def choose_boundary():
+    """
+    Our embarrassingly-simple replacement for mimetools.choose_boundary.
+    """
+    boundary = binascii.hexlify(os.urandom(16))
+    if not six.PY2:
+        boundary = boundary.decode("ascii")
+    return boundary
+
+
+def iter_field_objects(fields):
+    """
+    Iterate over fields.
+
+    Supports list of (k, v) tuples and dicts, and lists of
+    :class:`~urllib3.fields.RequestField`.
+
+    """
+    if isinstance(fields, dict):
+        i = six.iteritems(fields)
+    else:
+        i = iter(fields)
+
+    for field in i:
+        if isinstance(field, RequestField):
+            yield field
+        else:
+            yield RequestField.from_tuples(*field)
+
+
+def iter_fields(fields):
+    """
+    .. deprecated:: 1.6
+
+    Iterate over fields.
+
+    The addition of :class:`~urllib3.fields.RequestField` makes this function
+    obsolete. Instead, use :func:`iter_field_objects`, which returns
+    :class:`~urllib3.fields.RequestField` objects.
+
+    Supports list of (k, v) tuples and dicts.
+    """
+    if isinstance(fields, dict):
+        return ((k, v) for k, v in six.iteritems(fields))
+
+    return ((k, v) for k, v in fields)
+
+
+def encode_multipart_formdata(fields, boundary=None):
+    """
+    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
+
+    :param fields:
+        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
+
+    :param boundary:
+        If not specified, then a random boundary will be generated using
+        :func:`urllib3.filepost.choose_boundary`.
+    """
+    body = BytesIO()
+    if boundary is None:
+        boundary = choose_boundary()
+
+    for field in iter_field_objects(fields):
+        body.write(b("--%s\r\n" % (boundary)))
+
+        writer(body).write(field.render_headers())
+        data = field.data
+
+        if isinstance(data, int):
+            data = str(data)  # Backwards compatibility
+
+        if isinstance(data, six.text_type):
+            writer(body).write(data)
+        else:
+            body.write(data)
+
+        body.write(b"\r\n")
+
+    body.write(b("--%s--\r\n" % (boundary)))
+
+    content_type = str("multipart/form-data; boundary=%s" % boundary)
+
+    return body.getvalue(), content_type
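+
+# Illustrative sketch (not part of the module): encoding a simple form with
+# one plain field and one file tuple; names and contents are assumptions.
+#
+#     body, content_type = encode_multipart_formdata(
+#         {"token": "abc123", "upload": ("notes.txt", b"hello world")}
+#     )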
diff --git a/venv/lib/python3.7/site-packages/urllib3/packages/__init__.py b/venv/lib/python3.7/site-packages/urllib3/packages/__init__.py
new file mode 100644
index 00000000..fce4caa6
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/packages/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+
+from . import ssl_match_hostname
+
+__all__ = ("ssl_match_hostname",)
diff --git a/venv/lib/python3.7/site-packages/urllib3/packages/backports/__init__.py b/venv/lib/python3.7/site-packages/urllib3/packages/backports/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/urllib3/packages/backports/makefile.py b/venv/lib/python3.7/site-packages/urllib3/packages/backports/makefile.py
new file mode 100644
index 00000000..b8fb2154
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/packages/backports/makefile.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+"""
+backports.makefile
+~~~~~~~~~~~~~~~~~~
+
+Backports the Python 3 ``socket.makefile`` method for use with anything that
+wants to create a "fake" socket object.
+"""
+import io
+from socket import SocketIO
+
+
+def backport_makefile(
+    self, mode="r", buffering=None, encoding=None, errors=None, newline=None
+):
+    """
+    Backport of ``socket.makefile`` from Python 3.5.
+    """
+    if not set(mode) <= {"r", "w", "b"}:
+        raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
+    writing = "w" in mode
+    reading = "r" in mode or not writing
+    assert reading or writing
+    binary = "b" in mode
+    rawmode = ""
+    if reading:
+        rawmode += "r"
+    if writing:
+        rawmode += "w"
+    raw = SocketIO(self, rawmode)
+    self._makefile_refs += 1
+    if buffering is None:
+        buffering = -1
+    if buffering < 0:
+        buffering = io.DEFAULT_BUFFER_SIZE
+    if buffering == 0:
+        if not binary:
+            raise ValueError("unbuffered streams must be binary")
+        return raw
+    if reading and writing:
+        buffer = io.BufferedRWPair(raw, raw, buffering)
+    elif reading:
+        buffer = io.BufferedReader(raw, buffering)
+    else:
+        assert writing
+        buffer = io.BufferedWriter(raw, buffering)
+    if binary:
+        return buffer
+    text = io.TextIOWrapper(buffer, encoding, errors, newline)
+    text.mode = mode
+    return text
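+
+# Illustrative sketch (not part of the module): ``self`` is expected to be a
+# socket-like object with a ``_makefile_refs`` counter, such as the
+# WrappedSocket wrapper in urllib3.contrib.securetransport.
+#
+#     fp = backport_makefile(wrapped_socket, "rb")
+#     data = fp.read()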
diff --git a/venv/lib/python3.7/site-packages/urllib3/packages/six.py b/venv/lib/python3.7/site-packages/urllib3/packages/six.py
new file mode 100644
index 00000000..31442409
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/packages/six.py
@@ -0,0 +1,1021 @@
+# Copyright (c) 2010-2019 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.12.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+    string_types = (str,)
+    integer_types = (int,)
+    class_types = (type,)
+    text_type = str
+    binary_type = bytes
+
+    MAXSIZE = sys.maxsize
+else:
+    string_types = (basestring,)
+    integer_types = (int, long)
+    class_types = (type, types.ClassType)
+    text_type = unicode
+    binary_type = str
+
+    if sys.platform.startswith("java"):
+        # Jython always uses 32 bits.
+        MAXSIZE = int((1 << 31) - 1)
+    else:
+        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+        class X(object):
+            def __len__(self):
+                return 1 << 31
+
+        try:
+            len(X())
+        except OverflowError:
+            # 32-bit
+            MAXSIZE = int((1 << 31) - 1)
+        else:
+            # 64-bit
+            MAXSIZE = int((1 << 63) - 1)
+        del X
+
+
+def _add_doc(func, doc):
+    """Add documentation to a function."""
+    func.__doc__ = doc
+
+
+def _import_module(name):
+    """Import module, returning the module after the last dot."""
+    __import__(name)
+    return sys.modules[name]
+
+
+class _LazyDescr(object):
+    def __init__(self, name):
+        self.name = name
+
+    def __get__(self, obj, tp):
+        result = self._resolve()
+        setattr(obj, self.name, result)  # Invokes __set__.
+        try:
+            # This is a bit ugly, but it avoids running this again by
+            # removing this descriptor.
+            delattr(obj.__class__, self.name)
+        except AttributeError:
+            pass
+        return result
+
+
+class MovedModule(_LazyDescr):
+    def __init__(self, name, old, new=None):
+        super(MovedModule, self).__init__(name)
+        if PY3:
+            if new is None:
+                new = name
+            self.mod = new
+        else:
+            self.mod = old
+
+    def _resolve(self):
+        return _import_module(self.mod)
+
+    def __getattr__(self, attr):
+        _module = self._resolve()
+        value = getattr(_module, attr)
+        setattr(self, attr, value)
+        return value
+
+
+class _LazyModule(types.ModuleType):
+    def __init__(self, name):
+        super(_LazyModule, self).__init__(name)
+        self.__doc__ = self.__class__.__doc__
+
+    def __dir__(self):
+        attrs = ["__doc__", "__name__"]
+        attrs += [attr.name for attr in self._moved_attributes]
+        return attrs
+
+    # Subclasses should override this
+    _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+        super(MovedAttribute, self).__init__(name)
+        if PY3:
+            if new_mod is None:
+                new_mod = name
+            self.mod = new_mod
+            if new_attr is None:
+                if old_attr is None:
+                    new_attr = name
+                else:
+                    new_attr = old_attr
+            self.attr = new_attr
+        else:
+            self.mod = old_mod
+            if old_attr is None:
+                old_attr = name
+            self.attr = old_attr
+
+    def _resolve(self):
+        module = _import_module(self.mod)
+        return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+    """
+    A meta path importer to import six.moves and its submodules.
+
+    This class implements a PEP 302 finder and loader. It should be compatible
+    with Python 2.5 and all existing versions of Python 3.
+    """
+
+    def __init__(self, six_module_name):
+        self.name = six_module_name
+        self.known_modules = {}
+
+    def _add_module(self, mod, *fullnames):
+        for fullname in fullnames:
+            self.known_modules[self.name + "." + fullname] = mod
+
+    def _get_module(self, fullname):
+        return self.known_modules[self.name + "." + fullname]
+
+    def find_module(self, fullname, path=None):
+        if fullname in self.known_modules:
+            return self
+        return None
+
+    def __get_module(self, fullname):
+        try:
+            return self.known_modules[fullname]
+        except KeyError:
+            raise ImportError("This loader does not know module " + fullname)
+
+    def load_module(self, fullname):
+        try:
+            # in case of a reload
+            return sys.modules[fullname]
+        except KeyError:
+            pass
+        mod = self.__get_module(fullname)
+        if isinstance(mod, MovedModule):
+            mod = mod._resolve()
+        else:
+            mod.__loader__ = self
+        sys.modules[fullname] = mod
+        return mod
+
+    def is_package(self, fullname):
+        """
+        Return true if the named module is a package.
+
+        We need this method to get correct spec objects with
+        Python 3.4 (see PEP 451).
+        """
+        return hasattr(self.__get_module(fullname), "__path__")
+
+    def get_code(self, fullname):
+        """Return None
+
+        Required if is_package is implemented."""
+        self.__get_module(fullname)  # eventually raises ImportError
+        return None
+
+    get_source = get_code  # same as get_code
+
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+    """Lazy loading of moved objects"""
+
+    __path__ = []  # mark as package
+
+
+_moved_attributes = [
+    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+    MovedAttribute(
+        "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"
+    ),
+    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+    MovedAttribute("intern", "__builtin__", "sys"),
+    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+    MovedAttribute("getoutput", "commands", "subprocess"),
+    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+    MovedAttribute(
+        "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"
+    ),
+    MovedAttribute("reduce", "__builtin__", "functools"),
+    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+    MovedAttribute("StringIO", "StringIO", "io"),
+    MovedAttribute("UserDict", "UserDict", "collections"),
+    MovedAttribute("UserList", "UserList", "collections"),
+    MovedAttribute("UserString", "UserString", "collections"),
+    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+    MovedAttribute(
+        "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"
+    ),
+    MovedModule("builtins", "__builtin__"),
+    MovedModule("configparser", "ConfigParser"),
+    MovedModule("copyreg", "copy_reg"),
+    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+    MovedModule("http_cookies", "Cookie", "http.cookies"),
+    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+    MovedModule("html_parser", "HTMLParser", "html.parser"),
+    MovedModule("http_client", "httplib", "http.client"),
+    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+    MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+    MovedModule(
+        "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"
+    ),
+    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+    MovedModule("cPickle", "cPickle", "pickle"),
+    MovedModule("queue", "Queue"),
+    MovedModule("reprlib", "repr"),
+    MovedModule("socketserver", "SocketServer"),
+    MovedModule("_thread", "thread", "_thread"),
+    MovedModule("tkinter", "Tkinter"),
+    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+    MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"),
+    MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"),
+    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"),
+    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+    _moved_attributes += [MovedModule("winreg", "_winreg")]
+
+for attr in _moved_attributes:
+    setattr(_MovedItems, attr.name, attr)
+    if isinstance(attr, MovedModule):
+        _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
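+
+
+# --- Hedged illustration (editor's sketch, not part of upstream six) -------
+# Attribute access on ``moves`` is what drives the lazy machinery above: a
+# MovedAttribute or MovedModule descriptor resolves the real object on first
+# use and then replaces itself. Guarded so it only runs when this file is
+# executed directly.
+if __name__ == "__main__":
+    assert list(moves.range(3)) == [0, 1, 2]  # resolves builtins.range on Py3
+    assert moves.queue.__name__ == "queue"    # resolves the stdlib queue module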
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("quote", "urllib", "urllib.parse"),
+    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("unquote", "urllib", "urllib.parse"),
+    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+    MovedAttribute(
+        "unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"
+    ),
+    MovedAttribute("urlencode", "urllib", "urllib.parse"),
+    MovedAttribute("splitquery", "urllib", "urllib.parse"),
+    MovedAttribute("splittag", "urllib", "urllib.parse"),
+    MovedAttribute("splituser", "urllib", "urllib.parse"),
+    MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+    setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(
+    Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+    "moves.urllib_parse",
+    "moves.urllib.parse",
+)
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+    MovedAttribute("URLError", "urllib2", "urllib.error"),
+    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+    setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(
+    Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+    "moves.urllib_error",
+    "moves.urllib.error",
+)
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+    MovedAttribute("urlopen", "urllib2", "urllib.request"),
+    MovedAttribute("install_opener", "urllib2", "urllib.request"),
+    MovedAttribute("build_opener", "urllib2", "urllib.request"),
+    MovedAttribute("pathname2url", "urllib", "urllib.request"),
+    MovedAttribute("url2pathname", "urllib", "urllib.request"),
+    MovedAttribute("getproxies", "urllib", "urllib.request"),
+    MovedAttribute("Request", "urllib2", "urllib.request"),
+    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+    MovedAttribute("URLopener", "urllib", "urllib.request"),
+    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+    MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+    MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+    setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(
+    Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+    "moves.urllib_request",
+    "moves.urllib.request",
+)
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+    MovedAttribute("addbase", "urllib", "urllib.response"),
+    MovedAttribute("addclosehook", "urllib", "urllib.response"),
+    MovedAttribute("addinfo", "urllib", "urllib.response"),
+    MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+    setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(
+    Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+    "moves.urllib_response",
+    "moves.urllib.response",
+)
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser")
+]
+for attr in _urllib_robotparser_moved_attributes:
+    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = (
+    _urllib_robotparser_moved_attributes
+)
+
+_importer._add_module(
+    Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+    "moves.urllib_robotparser",
+    "moves.urllib.robotparser",
+)
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+
+    __path__ = []  # mark as package
+    parse = _importer._get_module("moves.urllib_parse")
+    error = _importer._get_module("moves.urllib_error")
+    request = _importer._get_module("moves.urllib_request")
+    response = _importer._get_module("moves.urllib_response")
+    robotparser = _importer._get_module("moves.urllib_robotparser")
+
+    def __dir__(self):
+        return ["parse", "error", "request", "response", "robotparser"]
+
+
+_importer._add_module(
+    Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib"
+)
+
+
+def add_move(move):
+    """Add an item to six.moves."""
+    setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+    """Remove item from six.moves."""
+    try:
+        delattr(_MovedItems, name)
+    except AttributeError:
+        try:
+            del moves.__dict__[name]
+        except KeyError:
+            raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+    _meth_func = "__func__"
+    _meth_self = "__self__"
+
+    _func_closure = "__closure__"
+    _func_code = "__code__"
+    _func_defaults = "__defaults__"
+    _func_globals = "__globals__"
+else:
+    _meth_func = "im_func"
+    _meth_self = "im_self"
+
+    _func_closure = "func_closure"
+    _func_code = "func_code"
+    _func_defaults = "func_defaults"
+    _func_globals = "func_globals"
+
+
+try:
+    advance_iterator = next
+except NameError:
+
+    def advance_iterator(it):
+        return it.next()
+
+
+next = advance_iterator
+
+
+try:
+    callable = callable
+except NameError:
+
+    def callable(obj):
+        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+
+    def get_unbound_function(unbound):
+        return unbound
+
+    create_bound_method = types.MethodType
+
+    def create_unbound_method(func, cls):
+        return func
+
+    Iterator = object
+else:
+
+    def get_unbound_function(unbound):
+        return unbound.im_func
+
+    def create_bound_method(func, obj):
+        return types.MethodType(func, obj, obj.__class__)
+
+    def create_unbound_method(func, cls):
+        return types.MethodType(func, None, cls)
+
+    class Iterator(object):
+        def next(self):
+            return type(self).__next__(self)
+
+    callable = callable
+_add_doc(
+    get_unbound_function, """Get the function out of a possibly unbound function"""
+)
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
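+
+
+# --- Hedged illustration (editor's sketch, not part of upstream six) -------
+# The accessors above normalize the im_func/__func__ naming differences
+# between the two majors; ``_C`` is a throwaway class for the demo.
+if __name__ == "__main__":
+
+    class _C(object):
+        def m(self):
+            return 42
+
+    inst = _C()
+    bound = create_bound_method(get_unbound_function(_C.m), inst)
+    assert bound() == 42 and get_method_self(bound) is inst
+    assert get_function_code(get_unbound_function(_C.m)).co_name == "m"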
+
+
+if PY3:
+
+    def iterkeys(d, **kw):
+        return iter(d.keys(**kw))
+
+    def itervalues(d, **kw):
+        return iter(d.values(**kw))
+
+    def iteritems(d, **kw):
+        return iter(d.items(**kw))
+
+    def iterlists(d, **kw):
+        return iter(d.lists(**kw))
+
+    viewkeys = operator.methodcaller("keys")
+
+    viewvalues = operator.methodcaller("values")
+
+    viewitems = operator.methodcaller("items")
+else:
+
+    def iterkeys(d, **kw):
+        return d.iterkeys(**kw)
+
+    def itervalues(d, **kw):
+        return d.itervalues(**kw)
+
+    def iteritems(d, **kw):
+        return d.iteritems(**kw)
+
+    def iterlists(d, **kw):
+        return d.iterlists(**kw)
+
+    viewkeys = operator.methodcaller("viewkeys")
+
+    viewvalues = operator.methodcaller("viewvalues")
+
+    viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(
+    iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary."
+)
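+
+
+# --- Hedged illustration (editor's sketch, not part of upstream six) -------
+# The iterator/view helpers above pick the right dict protocol per major
+# version; on Python 3 they are thin wrappers over keys()/values()/items().
+if __name__ == "__main__":
+    d = {"a": 1}
+    assert list(iterkeys(d)) == ["a"]
+    assert list(iteritems(d)) == [("a", 1)]
+    assert sorted(viewvalues(d)) == [1]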
+
+
+if PY3:
+
+    def b(s):
+        return s.encode("latin-1")
+
+    def u(s):
+        return s
+
+    unichr = chr
+    import struct
+
+    int2byte = struct.Struct(">B").pack
+    del struct
+    byte2int = operator.itemgetter(0)
+    indexbytes = operator.getitem
+    iterbytes = iter
+    import io
+
+    StringIO = io.StringIO
+    BytesIO = io.BytesIO
+    del io
+    _assertCountEqual = "assertCountEqual"
+    if sys.version_info[1] <= 1:
+        _assertRaisesRegex = "assertRaisesRegexp"
+        _assertRegex = "assertRegexpMatches"
+    else:
+        _assertRaisesRegex = "assertRaisesRegex"
+        _assertRegex = "assertRegex"
+else:
+
+    def b(s):
+        return s
+
+    # Workaround for standalone backslash
+
+    def u(s):
+        return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape")
+
+    unichr = unichr
+    int2byte = chr
+
+    def byte2int(bs):
+        return ord(bs[0])
+
+    def indexbytes(buf, i):
+        return ord(buf[i])
+
+    iterbytes = functools.partial(itertools.imap, ord)
+    import StringIO
+
+    StringIO = BytesIO = StringIO.StringIO
+    _assertCountEqual = "assertItemsEqual"
+    _assertRaisesRegex = "assertRaisesRegexp"
+    _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
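+
+
+# --- Hedged illustration (editor's sketch, not part of upstream six) -------
+# b()/u() give literal-like constructors that are no-ops where the native
+# type already matches, and the byte helpers hide the bytes-indexing
+# difference between the two majors.
+if __name__ == "__main__":
+    assert b("abc") == b"abc" and isinstance(b("abc"), binary_type)
+    assert u("abc") == "abc" and isinstance(u("abc"), text_type)
+    assert int2byte(65) == b"A" and byte2int(b"A") == 65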
+
+
+def assertCountEqual(self, *args, **kwargs):
+    return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+    return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+    return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+    exec_ = getattr(moves.builtins, "exec")
+
+    def reraise(tp, value, tb=None):
+        try:
+            if value is None:
+                value = tp()
+            if value.__traceback__ is not tb:
+                raise value.with_traceback(tb)
+            raise value
+        finally:
+            value = None
+            tb = None
+
+
+else:
+
+    def exec_(_code_, _globs_=None, _locs_=None):
+        """Execute code in a namespace."""
+        if _globs_ is None:
+            frame = sys._getframe(1)
+            _globs_ = frame.f_globals
+            if _locs_ is None:
+                _locs_ = frame.f_locals
+            del frame
+        elif _locs_ is None:
+            _locs_ = _globs_
+        exec("""exec _code_ in _globs_, _locs_""")
+
+    exec_(
+        """def reraise(tp, value, tb=None):
+    try:
+        raise tp, value, tb
+    finally:
+        tb = None
+"""
+    )
+
+
+if sys.version_info[:2] == (3, 2):
+    exec_(
+        """def raise_from(value, from_value):
+    try:
+        if from_value is None:
+            raise value
+        raise value from from_value
+    finally:
+        value = None
+"""
+    )
+elif sys.version_info[:2] > (3, 2):
+    exec_(
+        """def raise_from(value, from_value):
+    try:
+        raise value from from_value
+    finally:
+        value = None
+"""
+    )
+else:
+
+    def raise_from(value, from_value):
+        raise value
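+
+
+# --- Hedged illustration (editor's sketch, not part of upstream six) -------
+# raise_from() wraps one exception in another; on Python 3 the inner error
+# survives as __cause__, on older interpreters it is simply dropped.
+if __name__ == "__main__":
+    try:
+        try:
+            raise ValueError("inner")
+        except ValueError:
+            raise_from(KeyError("outer"), sys.exc_info()[1])
+    except KeyError as e:
+        assert PY2 or isinstance(getattr(e, "__cause__", None), ValueError)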
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+
+    def print_(*args, **kwargs):
+        """The new-style print function for Python 2.4 and 2.5."""
+        fp = kwargs.pop("file", sys.stdout)
+        if fp is None:
+            return
+
+        def write(data):
+            if not isinstance(data, basestring):
+                data = str(data)
+            # If the file has an encoding, encode unicode with it.
+            if (
+                isinstance(fp, file)
+                and isinstance(data, unicode)
+                and fp.encoding is not None
+            ):
+                errors = getattr(fp, "errors", None)
+                if errors is None:
+                    errors = "strict"
+                data = data.encode(fp.encoding, errors)
+            fp.write(data)
+
+        want_unicode = False
+        sep = kwargs.pop("sep", None)
+        if sep is not None:
+            if isinstance(sep, unicode):
+                want_unicode = True
+            elif not isinstance(sep, str):
+                raise TypeError("sep must be None or a string")
+        end = kwargs.pop("end", None)
+        if end is not None:
+            if isinstance(end, unicode):
+                want_unicode = True
+            elif not isinstance(end, str):
+                raise TypeError("end must be None or a string")
+        if kwargs:
+            raise TypeError("invalid keyword arguments to print()")
+        if not want_unicode:
+            for arg in args:
+                if isinstance(arg, unicode):
+                    want_unicode = True
+                    break
+        if want_unicode:
+            newline = unicode("\n")
+            space = unicode(" ")
+        else:
+            newline = "\n"
+            space = " "
+        if sep is None:
+            sep = space
+        if end is None:
+            end = newline
+        for i, arg in enumerate(args):
+            if i:
+                write(sep)
+            write(arg)
+        write(end)
+
+
+if sys.version_info[:2] < (3, 3):
+    _print = print_
+
+    def print_(*args, **kwargs):
+        fp = kwargs.get("file", sys.stdout)
+        flush = kwargs.pop("flush", False)
+        _print(*args, **kwargs)
+        if flush and fp is not None:
+            fp.flush()
+
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+
+    def wraps(
+        wrapped,
+        assigned=functools.WRAPPER_ASSIGNMENTS,
+        updated=functools.WRAPPER_UPDATES,
+    ):
+        def wrapper(f):
+            f = functools.wraps(wrapped, assigned, updated)(f)
+            f.__wrapped__ = wrapped
+            return f
+
+        return wrapper
+
+
+else:
+    wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+    """Create a base class with a metaclass."""
+    # This requires a bit of explanation: the basic idea is to make a dummy
+    # metaclass for one level of class instantiation that replaces itself with
+    # the actual metaclass.
+    class metaclass(type):
+        def __new__(cls, name, this_bases, d):
+            return meta(name, bases, d)
+
+        @classmethod
+        def __prepare__(cls, name, this_bases):
+            return meta.__prepare__(name, bases)
+
+    return type.__new__(metaclass, "temporary_class", (), {})
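+
+
+# --- Hedged illustration (editor's sketch, not part of upstream six) -------
+# Thanks to the dummy-metaclass trick above, the class below is created
+# exactly once, directly by ``_Meta``, on both major versions.
+if __name__ == "__main__":
+
+    class _Meta(type):
+        pass
+
+    class _Widget(with_metaclass(_Meta, object)):
+        pass
+
+    assert type(_Widget) is _Meta and _Widget.__bases__ == (object,)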
+
+
+def add_metaclass(metaclass):
+    """Class decorator for creating a class with a metaclass."""
+
+    def wrapper(cls):
+        orig_vars = cls.__dict__.copy()
+        slots = orig_vars.get("__slots__")
+        if slots is not None:
+            if isinstance(slots, str):
+                slots = [slots]
+            for slots_var in slots:
+                orig_vars.pop(slots_var)
+        orig_vars.pop("__dict__", None)
+        orig_vars.pop("__weakref__", None)
+        if hasattr(cls, "__qualname__"):
+            orig_vars["__qualname__"] = cls.__qualname__
+        return metaclass(cls.__name__, cls.__bases__, orig_vars)
+
+    return wrapper
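+
+
+# --- Hedged illustration (editor's sketch, not part of upstream six) -------
+# add_metaclass() achieves the same effect as with_metaclass(), but as a
+# decorator that rebuilds the already-created class under the metaclass.
+if __name__ == "__main__":
+
+    class _Registry(type):
+        names = []
+
+        def __new__(mcls, name, bases, d):
+            _Registry.names.append(name)
+            return super(_Registry, mcls).__new__(mcls, name, bases, d)
+
+    @add_metaclass(_Registry)
+    class _Plugin(object):
+        pass
+
+    assert type(_Plugin) is _Registry and "_Plugin" in _Registry.names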
+
+
+def ensure_binary(s, encoding="utf-8", errors="strict"):
+    """Coerce **s** to six.binary_type.
+
+    For Python 2:
+      - `unicode` -> encoded to `str`
+      - `str` -> `str`
+
+    For Python 3:
+      - `str` -> encoded to `bytes`
+      - `bytes` -> `bytes`
+    """
+    if isinstance(s, text_type):
+        return s.encode(encoding, errors)
+    elif isinstance(s, binary_type):
+        return s
+    else:
+        raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding="utf-8", errors="strict"):
+    """Coerce *s* to `str`.
+
+    For Python 2:
+      - `unicode` -> encoded to `str`
+      - `str` -> `str`
+
+    For Python 3:
+      - `str` -> `str`
+      - `bytes` -> decoded to `str`
+    """
+    if not isinstance(s, (text_type, binary_type)):
+        raise TypeError("not expecting type '%s'" % type(s))
+    if PY2 and isinstance(s, text_type):
+        s = s.encode(encoding, errors)
+    elif PY3 and isinstance(s, binary_type):
+        s = s.decode(encoding, errors)
+    return s
+
+
+def ensure_text(s, encoding="utf-8", errors="strict"):
+    """Coerce *s* to six.text_type.
+
+    For Python 2:
+      - `unicode` -> `unicode`
+      - `str` -> `unicode`
+
+    For Python 3:
+      - `str` -> `str`
+      - `bytes` -> decoded to `str`
+    """
+    if isinstance(s, binary_type):
+        return s.decode(encoding, errors)
+    elif isinstance(s, text_type):
+        return s
+    else:
+        raise TypeError("not expecting type '%s'" % type(s))
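+
+
+# --- Hedged illustration (editor's sketch, not part of upstream six) -------
+# The ensure_* trio gives explicit one-way coercions that fail loudly on
+# unexpected types, instead of Python 2's implicit str<->unicode promotion.
+if __name__ == "__main__":
+    assert ensure_binary(u"caf\xe9") == b"caf\xc3\xa9"
+    assert ensure_text(b"caf\xc3\xa9") == u"caf\xe9"
+    assert ensure_str("already native") == "already native"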
+
+
+def python_2_unicode_compatible(klass):
+    """
+    A decorator that defines __unicode__ and __str__ methods under Python 2.
+    Under Python 3 it does nothing.
+
+    To support Python 2 and 3 with a single code base, define a __str__ method
+    returning text and apply this decorator to the class.
+    """
+    if PY2:
+        if "__str__" not in klass.__dict__:
+            raise ValueError(
+                "@python_2_unicode_compatible cannot be applied "
+                "to %s because it doesn't define __str__()." % klass.__name__
+            )
+        klass.__unicode__ = klass.__str__
+        klass.__str__ = lambda self: self.__unicode__().encode("utf-8")
+    return klass
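+
+
+# --- Hedged illustration (editor's sketch, not part of upstream six) -------
+# On Python 3 the decorator below is a no-op; on Python 2 it would move the
+# text-returning __str__ to __unicode__ and add a UTF-8-encoding __str__.
+if __name__ == "__main__":
+
+    @python_2_unicode_compatible
+    class _Greeting(object):
+        def __str__(self):
+            return u"hell\xf8"
+
+    assert text_type(_Greeting()) == u"hell\xf8"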
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = []  # required for PEP 302 and PEP 451
+__package__ = __name__  # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+    for i, importer in enumerate(sys.meta_path):
+        # Here's some real nastiness: Another "instance" of the six module might
+        # be floating around. Therefore, we can't use isinstance() to check for
+        # the six meta path importer, since the other six instance will have
+        # inserted an importer with a different class.
+        if (
+            type(importer).__name__ == "_SixMetaPathImporter"
+            and importer.name == __name__
+        ):
+            del sys.meta_path[i]
+            break
+    del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/venv/lib/python3.7/site-packages/urllib3/packages/ssl_match_hostname/__init__.py b/venv/lib/python3.7/site-packages/urllib3/packages/ssl_match_hostname/__init__.py
new file mode 100644
index 00000000..6b12fd90
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/packages/ssl_match_hostname/__init__.py
@@ -0,0 +1,22 @@
+import sys
+
+try:
+    # Our match_hostname function is the same as 3.5's, so we only want to
+    # import the match_hostname function if it's at least that good.
+    if sys.version_info < (3, 5):
+        raise ImportError("Fallback to vendored code")
+
+    from ssl import CertificateError, match_hostname
+except ImportError:
+    try:
+        # Backport of the function from a pypi module
+        from backports.ssl_match_hostname import (  # type: ignore
+            CertificateError,
+            match_hostname,
+        )
+    except ImportError:
+        # Our vendored copy
+        from ._implementation import CertificateError, match_hostname  # type: ignore
+
+# Not needed, but documenting what we provide.
+__all__ = ("CertificateError", "match_hostname")
diff --git a/venv/lib/python3.7/site-packages/urllib3/packages/ssl_match_hostname/_implementation.py b/venv/lib/python3.7/site-packages/urllib3/packages/ssl_match_hostname/_implementation.py
new file mode 100644
index 00000000..689208d3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/packages/ssl_match_hostname/_implementation.py
@@ -0,0 +1,160 @@
+"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
+
+# Note: This file is under the PSF license as the code comes from the Python
+# stdlib.  http://docs.python.org/3/license.html
+
+import re
+import sys
+
+# ipaddress has been backported to 2.6+ in pypi.  If it is installed on the
+# system, use it to handle IPAddress ServerAltnames (this was added in
+# python-3.5) otherwise only do DNS matching.  This allows
+# backports.ssl_match_hostname to continue to be used in Python 2.7.
+try:
+    import ipaddress
+except ImportError:
+    ipaddress = None
+
+__version__ = "3.5.0.1"
+
+
+class CertificateError(ValueError):
+    pass
+
+
+def _dnsname_match(dn, hostname, max_wildcards=1):
+    """Matching according to RFC 6125, section 6.4.3
+
+    http://tools.ietf.org/html/rfc6125#section-6.4.3
+    """
+    pats = []
+    if not dn:
+        return False
+
+    # Ported from python3-syntax:
+    # leftmost, *remainder = dn.split(r'.')
+    parts = dn.split(r".")
+    leftmost = parts[0]
+    remainder = parts[1:]
+
+    wildcards = leftmost.count("*")
+    if wildcards > max_wildcards:
+        # Issue #17980: avoid denials of service by refusing more
+        # than one wildcard per fragment.  A survey of established
+        # policy among SSL implementations showed it to be a
+        # reasonable choice.
+        raise CertificateError(
+            "too many wildcards in certificate DNS name: " + repr(dn)
+        )
+
+    # speed up common case w/o wildcards
+    if not wildcards:
+        return dn.lower() == hostname.lower()
+
+    # RFC 6125, section 6.4.3, subitem 1.
+    # The client SHOULD NOT attempt to match a presented identifier in which
+    # the wildcard character comprises a label other than the left-most label.
+    if leftmost == "*":
+        # When '*' is a fragment by itself, it matches a non-empty dotless
+        # fragment.
+        pats.append("[^.]+")
+    elif leftmost.startswith("xn--") or hostname.startswith("xn--"):
+        # RFC 6125, section 6.4.3, subitem 3.
+        # The client SHOULD NOT attempt to match a presented identifier
+        # where the wildcard character is embedded within an A-label or
+        # U-label of an internationalized domain name.
+        pats.append(re.escape(leftmost))
+    else:
+        # Otherwise, '*' matches any dotless string, e.g. www*
+        pats.append(re.escape(leftmost).replace(r"\*", "[^.]*"))
+
+    # add the remaining fragments, ignore any wildcards
+    for frag in remainder:
+        pats.append(re.escape(frag))
+
+    pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
+    return pat.match(hostname)
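+
+
+# --- Hedged illustration (editor's sketch, not vendored urllib3 code) ------
+# A single left-most wildcard matches exactly one DNS label, and more than
+# one wildcard per fragment is refused outright (the Issue #17980 guard
+# described above).
+if __name__ == "__main__":
+    assert _dnsname_match("*.example.com", "www.example.com")
+    assert not _dnsname_match("*.example.com", "a.b.example.com")
+    try:
+        _dnsname_match("a*b*.example.com", "axbyc.example.com")
+        raise AssertionError("expected CertificateError")
+    except CertificateError:
+        pass  # more than one wildcard per fragment is rejected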
+
+
+def _to_unicode(obj):
+    if isinstance(obj, str) and sys.version_info < (3,):
+        obj = unicode(obj, encoding="ascii", errors="strict")
+    return obj
+
+
+def _ipaddress_match(ipname, host_ip):
+    """Exact matching of IP addresses.
+
+    RFC 6125 explicitly doesn't define an algorithm for this
+    (section 1.7.2 - "Out of Scope").
+    """
+    # OpenSSL may add a trailing newline to a subjectAltName's IP address
+    # Divergence from upstream: ipaddress can't handle byte str
+    ip = ipaddress.ip_address(_to_unicode(ipname).rstrip())
+    return ip == host_ip
+
+
+def match_hostname(cert, hostname):
+    """Verify that *cert* (in decoded format as returned by
+    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
+    rules are followed, but IP addresses are not accepted for *hostname*.
+
+    CertificateError is raised on failure. On success, the function
+    returns nothing.
+    """
+    if not cert:
+        raise ValueError(
+            "empty or no certificate, match_hostname needs a "
+            "SSL socket or SSL context with either "
+            "CERT_OPTIONAL or CERT_REQUIRED"
+        )
+    try:
+        # Divergence from upstream: ipaddress can't handle byte str
+        host_ip = ipaddress.ip_address(_to_unicode(hostname))
+    except ValueError:
+        # Not an IP address (common case)
+        host_ip = None
+    except UnicodeError:
+        # Divergence from upstream: Have to deal with ipaddress not taking
+        # byte strings.  addresses should be all ascii, so we consider it not
+        # an ipaddress in this case
+        host_ip = None
+    except AttributeError:
+        # Divergence from upstream: Make ipaddress library optional
+        if ipaddress is None:
+            host_ip = None
+        else:
+            raise
+    dnsnames = []
+    san = cert.get("subjectAltName", ())
+    for key, value in san:
+        if key == "DNS":
+            if host_ip is None and _dnsname_match(value, hostname):
+                return
+            dnsnames.append(value)
+        elif key == "IP Address":
+            if host_ip is not None and _ipaddress_match(value, host_ip):
+                return
+            dnsnames.append(value)
+    if not dnsnames:
+        # The subject is only checked when there is no dNSName entry
+        # in subjectAltName
+        for sub in cert.get("subject", ()):
+            for key, value in sub:
+                # XXX according to RFC 2818, the most specific Common Name
+                # must be used.
+                if key == "commonName":
+                    if _dnsname_match(value, hostname):
+                        return
+                    dnsnames.append(value)
+    if len(dnsnames) > 1:
+        raise CertificateError(
+            "hostname %r "
+            "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
+        )
+    elif len(dnsnames) == 1:
+        raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
+    else:
+        raise CertificateError(
+            "no appropriate commonName or subjectAltName fields were found"
+        )
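+
+
+# --- Hedged illustration (editor's sketch, not vendored urllib3 code) ------
+# match_hostname() consumes the dict shape produced by
+# SSLSocket.getpeercert(); the cert below is a synthetic stand-in.
+if __name__ == "__main__":
+    cert = {"subjectAltName": (("DNS", "*.example.com"), ("DNS", "example.com"))}
+    match_hostname(cert, "www.example.com")  # success: returns None
+    try:
+        match_hostname(cert, "www.other.org")
+        raise AssertionError("expected CertificateError")
+    except CertificateError:
+        pass  # neither subjectAltName entry matched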
diff --git a/venv/lib/python3.7/site-packages/urllib3/poolmanager.py b/venv/lib/python3.7/site-packages/urllib3/poolmanager.py
new file mode 100644
index 00000000..3a31a285
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/poolmanager.py
@@ -0,0 +1,536 @@
+from __future__ import absolute_import
+
+import collections
+import functools
+import logging
+
+from ._collections import RecentlyUsedContainer
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
+from .exceptions import (
+    LocationValueError,
+    MaxRetryError,
+    ProxySchemeUnknown,
+    ProxySchemeUnsupported,
+    URLSchemeUnknown,
+)
+from .packages import six
+from .packages.six.moves.urllib.parse import urljoin
+from .request import RequestMethods
+from .util.proxy import connection_requires_http_tunnel
+from .util.retry import Retry
+from .util.url import parse_url
+
+__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
+
+
+log = logging.getLogger(__name__)
+
+SSL_KEYWORDS = (
+    "key_file",
+    "cert_file",
+    "cert_reqs",
+    "ca_certs",
+    "ssl_version",
+    "ca_cert_dir",
+    "ssl_context",
+    "key_password",
+)
+
+# All known keyword arguments that could be provided to the pool manager, its
+# pools, or the underlying connections. This is used to construct a pool key.
+_key_fields = (
+    "key_scheme",  # str
+    "key_host",  # str
+    "key_port",  # int
+    "key_timeout",  # int or float or Timeout
+    "key_retries",  # int or Retry
+    "key_strict",  # bool
+    "key_block",  # bool
+    "key_source_address",  # str
+    "key_key_file",  # str
+    "key_key_password",  # str
+    "key_cert_file",  # str
+    "key_cert_reqs",  # str
+    "key_ca_certs",  # str
+    "key_ssl_version",  # str
+    "key_ca_cert_dir",  # str
+    "key_ssl_context",  # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
+    "key_maxsize",  # int
+    "key_headers",  # dict
+    "key__proxy",  # parsed proxy url
+    "key__proxy_headers",  # dict
+    "key__proxy_config",  # class
+    "key_socket_options",  # list of (level (int), optname (int), value (int or str)) tuples
+    "key__socks_options",  # dict
+    "key_assert_hostname",  # bool or string
+    "key_assert_fingerprint",  # str
+    "key_server_hostname",  # str
+)
+
+#: The namedtuple class used to construct keys for the connection pool.
+#: All custom key schemes should include the fields in this key at a minimum.
+PoolKey = collections.namedtuple("PoolKey", _key_fields)
+
+_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
+ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
+
+
+def _default_key_normalizer(key_class, request_context):
+    """
+    Create a pool key out of a request context dictionary.
+
+    According to RFC 3986, both the scheme and host are case-insensitive.
+    Therefore, this function normalizes both before constructing the pool
+    key for an HTTPS request. If you wish to change this behaviour, provide
+    alternate callables to ``key_fn_by_scheme``.
+
+    :param key_class:
+        The class to use when constructing the key. This should be a namedtuple
+        with the ``scheme`` and ``host`` keys at a minimum.
+    :type  key_class: namedtuple
+    :param request_context:
+        A dictionary-like object that contains the context for a request.
+    :type  request_context: dict
+
+    :return: A namedtuple that can be used as a connection pool key.
+    :rtype:  PoolKey
+    """
+    # Since we mutate the dictionary, make a copy first
+    context = request_context.copy()
+    context["scheme"] = context["scheme"].lower()
+    context["host"] = context["host"].lower()
+
+    # These are both dictionaries and need to be transformed into frozensets
+    for key in ("headers", "_proxy_headers", "_socks_options"):
+        if key in context and context[key] is not None:
+            context[key] = frozenset(context[key].items())
+
+    # The socket_options key may be a list and needs to be transformed into a
+    # tuple.
+    socket_opts = context.get("socket_options")
+    if socket_opts is not None:
+        context["socket_options"] = tuple(socket_opts)
+
+    # Map the kwargs to the names in the namedtuple - this is necessary since
+    # namedtuples can't have fields starting with '_'.
+    for key in list(context.keys()):
+        context["key_" + key] = context.pop(key)
+
+    # Default to ``None`` for keys missing from the context
+    for field in key_class._fields:
+        if field not in context:
+            context[field] = None
+
+    return key_class(**context)
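+
+
+# --- Hedged illustration (editor's sketch, not vendored urllib3 code) ------
+# The normalizer lower-cases scheme/host, freezes the mutable values, and
+# key_-prefixes everything so the context fits the PoolKey namedtuple.
+if __name__ == "__main__":
+    ctx = {"scheme": "HTTPS", "host": "Example.COM", "port": 443,
+           "headers": {"x-demo": "1"}}
+    key = _default_key_normalizer(PoolKey, ctx)
+    assert key.key_scheme == "https" and key.key_host == "example.com"
+    assert key.key_headers == frozenset([("x-demo", "1")])
+    assert key.key_timeout is None  # absent fields default to None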
+
+
+#: A dictionary that maps a scheme to a callable that creates a pool key.
+#: This can be used to alter the way pool keys are constructed, if desired.
+#: Each PoolManager makes a copy of this dictionary so they can be configured
+#: globally here, or individually on the instance.
+key_fn_by_scheme = {
+    "http": functools.partial(_default_key_normalizer, PoolKey),
+    "https": functools.partial(_default_key_normalizer, PoolKey),
+}
+
+pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
+
+
+class PoolManager(RequestMethods):
+    """
+    Allows for arbitrary requests while transparently keeping track of
+    necessary connection pools for you.
+
+    :param num_pools:
+        Number of connection pools to cache before discarding the least
+        recently used pool.
+
+    :param headers:
+        Headers to include with all requests, unless other headers are given
+        explicitly.
+
+    :param \\**connection_pool_kw:
+        Additional parameters are used to create fresh
+        :class:`urllib3.connectionpool.ConnectionPool` instances.
+
+    Example::
+
+        >>> manager = PoolManager(num_pools=2)
+        >>> r = manager.request('GET', 'http://google.com/')
+        >>> r = manager.request('GET', 'http://google.com/mail')
+        >>> r = manager.request('GET', 'http://yahoo.com/')
+        >>> len(manager.pools)
+        2
+
+    """
+
+    proxy = None
+    proxy_config = None
+
+    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
+        RequestMethods.__init__(self, headers)
+        self.connection_pool_kw = connection_pool_kw
+        self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
+
+        # Locally set the pool classes and keys so other PoolManagers can
+        # override them.
+        self.pool_classes_by_scheme = pool_classes_by_scheme
+        self.key_fn_by_scheme = key_fn_by_scheme.copy()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.clear()
+        # Return False to re-raise any potential exceptions
+        return False
+
+    def _new_pool(self, scheme, host, port, request_context=None):
+        """
+        Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
+        any additional pool keyword arguments.
+
+        If ``request_context`` is provided, it is provided as keyword arguments
+        to the pool class used. This method is used to actually create the
+        connection pools handed out by :meth:`connection_from_url` and
+        companion methods. It is intended to be overridden for customization.
+        """
+        pool_cls = self.pool_classes_by_scheme[scheme]
+        if request_context is None:
+            request_context = self.connection_pool_kw.copy()
+
+        # Although the context has everything necessary to create the pool,
+        # this function has historically only used the scheme, host, and port
+        # in the positional args. When an API change is acceptable these can
+        # be removed.
+        for key in ("scheme", "host", "port"):
+            request_context.pop(key, None)
+
+        if scheme == "http":
+            for kw in SSL_KEYWORDS:
+                request_context.pop(kw, None)
+
+        return pool_cls(host, port, **request_context)
+
+    def clear(self):
+        """
+        Empty our store of pools and direct them all to close.
+
+        This will not affect in-flight connections, but they will not be
+        re-used after completion.
+        """
+        self.pools.clear()
+
+    def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+        """
+        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
+
+        If ``port`` isn't given, it will be derived from the ``scheme`` using
+        ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
+        provided, it is merged with the instance's ``connection_pool_kw``
+        variable and used to create the new connection pool, if one is
+        needed.
+        """
+
+        if not host:
+            raise LocationValueError("No host specified.")
+
+        request_context = self._merge_pool_kwargs(pool_kwargs)
+        request_context["scheme"] = scheme or "http"
+        if not port:
+            port = port_by_scheme.get(request_context["scheme"].lower(), 80)
+        request_context["port"] = port
+        request_context["host"] = host
+
+        return self.connection_from_context(request_context)
+
+    def connection_from_context(self, request_context):
+        """
+        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
+
+        ``request_context`` must at least contain the ``scheme`` key and its
+        value must be a key in ``key_fn_by_scheme`` instance variable.
+        """
+        scheme = request_context["scheme"].lower()
+        pool_key_constructor = self.key_fn_by_scheme.get(scheme)
+        if not pool_key_constructor:
+            raise URLSchemeUnknown(scheme)
+        pool_key = pool_key_constructor(request_context)
+
+        return self.connection_from_pool_key(pool_key, request_context=request_context)
+
+    def connection_from_pool_key(self, pool_key, request_context=None):
+        """
+        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
+
+        ``pool_key`` should be a namedtuple that only contains immutable
+        objects. At a minimum it must have the ``scheme``, ``host``, and
+        ``port`` fields.
+        """
+        with self.pools.lock:
+            # If the scheme, host, or port doesn't match existing open
+            # connections, open a new ConnectionPool.
+            pool = self.pools.get(pool_key)
+            if pool:
+                return pool
+
+            # Make a fresh ConnectionPool of the desired type
+            scheme = request_context["scheme"]
+            host = request_context["host"]
+            port = request_context["port"]
+            pool = self._new_pool(scheme, host, port, request_context=request_context)
+            self.pools[pool_key] = pool
+
+        return pool
+
+    def connection_from_url(self, url, pool_kwargs=None):
+        """
+        Similar to :func:`urllib3.connectionpool.connection_from_url`.
+
+        If ``pool_kwargs`` is not provided and a new pool needs to be
+        constructed, ``self.connection_pool_kw`` is used to initialize
+        the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
+        is provided, it is used instead. Note that if a new pool does not
+        need to be created for the request, the provided ``pool_kwargs`` are
+        not used.
+        """
+        u = parse_url(url)
+        return self.connection_from_host(
+            u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
+        )
+
+    def _merge_pool_kwargs(self, override):
+        """
+        Merge a dictionary of override values for self.connection_pool_kw.
+
+        This does not modify self.connection_pool_kw and returns a new dict.
+        Any keys in the override dictionary with a value of ``None`` are
+        removed from the merged dictionary.
+        """
+        base_pool_kwargs = self.connection_pool_kw.copy()
+        if override:
+            for key, value in override.items():
+                if value is None:
+                    try:
+                        del base_pool_kwargs[key]
+                    except KeyError:
+                        pass
+                else:
+                    base_pool_kwargs[key] = value
+        return base_pool_kwargs
+
+    def _proxy_requires_url_absolute_form(self, parsed_url):
+        """
+        Indicates if the proxy requires the complete destination URL in the
+        request.  Normally this is only needed when not using an HTTP CONNECT
+        tunnel.
+        """
+        if self.proxy is None:
+            return False
+
+        return not connection_requires_http_tunnel(
+            self.proxy, self.proxy_config, parsed_url.scheme
+        )
+
+    def _validate_proxy_scheme_url_selection(self, url_scheme):
+        """
+        Validates that we're not attempting to do TLS-in-TLS connections on
+        Python 2 or with unsupported SSL implementations.
+        """
+        if self.proxy is None or url_scheme != "https":
+            return
+
+        if self.proxy.scheme != "https":
+            return
+
+        if six.PY2 and not self.proxy_config.use_forwarding_for_https:
+            raise ProxySchemeUnsupported(
+                "Contacting HTTPS destinations through HTTPS proxies "
+                "'via CONNECT tunnels' is not supported in Python 2"
+            )
+
+    def urlopen(self, method, url, redirect=True, **kw):
+        """
+        Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
+        with custom cross-host redirect logic and only sends the request-uri
+        portion of the ``url``.
+
+        The given ``url`` parameter must be absolute, such that an appropriate
+        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
+        """
+        u = parse_url(url)
+        self._validate_proxy_scheme_url_selection(u.scheme)
+
+        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
+
+        kw["assert_same_host"] = False
+        kw["redirect"] = False
+
+        if "headers" not in kw:
+            kw["headers"] = self.headers.copy()
+
+        if self._proxy_requires_url_absolute_form(u):
+            response = conn.urlopen(method, url, **kw)
+        else:
+            response = conn.urlopen(method, u.request_uri, **kw)
+
+        redirect_location = redirect and response.get_redirect_location()
+        if not redirect_location:
+            return response
+
+        # Support relative URLs for redirecting.
+        redirect_location = urljoin(url, redirect_location)
+
+        # RFC 7231, Section 6.4.4
+        if response.status == 303:
+            method = "GET"
+
+        retries = kw.get("retries")
+        if not isinstance(retries, Retry):
+            retries = Retry.from_int(retries, redirect=redirect)
+
+        # Strip headers marked as unsafe to forward to the redirected location.
+        # Check remove_headers_on_redirect to avoid a potential network call within
+        # conn.is_same_host() which may use socket.gethostbyname() in the future.
+        if retries.remove_headers_on_redirect and not conn.is_same_host(
+            redirect_location
+        ):
+            headers = list(six.iterkeys(kw["headers"]))
+            for header in headers:
+                if header.lower() in retries.remove_headers_on_redirect:
+                    kw["headers"].pop(header, None)
+
+        try:
+            retries = retries.increment(method, url, response=response, _pool=conn)
+        except MaxRetryError:
+            if retries.raise_on_redirect:
+                response.drain_conn()
+                raise
+            return response
+
+        kw["retries"] = retries
+        kw["redirect"] = redirect
+
+        log.info("Redirecting %s -> %s", url, redirect_location)
+
+        response.drain_conn()
+        return self.urlopen(method, redirect_location, **kw)
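+
+
+# --- Hedged illustration (editor's sketch, not vendored urllib3 code) ------
+# Run via ``python -m urllib3.poolmanager`` (the relative imports above keep
+# this file from running standalone). Shows the None-removal semantics of
+# _merge_pool_kwargs without touching the network.
+if __name__ == "__main__":
+    pm = PoolManager(num_pools=2, timeout=3.0, retries=2)
+    merged = pm._merge_pool_kwargs({"retries": None, "block": True})
+    assert merged == {"timeout": 3.0, "block": True}
+    assert pm.connection_pool_kw == {"timeout": 3.0, "retries": 2}  # unchanged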
+
+
+class ProxyManager(PoolManager):
+    """
+    Behaves just like :class:`PoolManager`, but sends all requests through
+    the defined proxy, using the CONNECT method for HTTPS URLs.
+
+    :param proxy_url:
+        The URL of the proxy to be used.
+
+    :param proxy_headers:
+        A dictionary containing headers that will be sent to the proxy. For
+        HTTP they are sent with each request, while for HTTPS/CONNECT they
+        are sent only once. Could be used for proxy
+        authentication.
+
+    :param proxy_ssl_context:
+        The proxy SSL context is used to establish the TLS connection to the
+        proxy when using HTTPS proxies.
+
+    :param use_forwarding_for_https:
+        (Defaults to False) If set to True, requests to an HTTPS proxy are
+        forwarded on behalf of the client instead of being made through a TLS
+        tunnel via the CONNECT method. **Enabling this flag means that request
+        and response headers and content will be visible to the HTTPS proxy**,
+        whereas tunneling keeps request and response headers and content
+        private. IP address, target hostname, SNI, and port are always visible
+        to an HTTPS proxy even when this flag is disabled.
+
+    Example:
+        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
+        >>> r1 = proxy.request('GET', 'http://google.com/')
+        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
+        >>> len(proxy.pools)
+        1
+        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
+        >>> r4 = proxy.request('GET', 'https://twitter.com/')
+        >>> len(proxy.pools)
+        3
+
+    """
+
+    def __init__(
+        self,
+        proxy_url,
+        num_pools=10,
+        headers=None,
+        proxy_headers=None,
+        proxy_ssl_context=None,
+        use_forwarding_for_https=False,
+        **connection_pool_kw
+    ):
+
+        if isinstance(proxy_url, HTTPConnectionPool):
+            proxy_url = "%s://%s:%i" % (
+                proxy_url.scheme,
+                proxy_url.host,
+                proxy_url.port,
+            )
+        proxy = parse_url(proxy_url)
+
+        if proxy.scheme not in ("http", "https"):
+            raise ProxySchemeUnknown(proxy.scheme)
+
+        if not proxy.port:
+            port = port_by_scheme.get(proxy.scheme, 80)
+            proxy = proxy._replace(port=port)
+
+        self.proxy = proxy
+        self.proxy_headers = proxy_headers or {}
+        self.proxy_ssl_context = proxy_ssl_context
+        self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
+
+        connection_pool_kw["_proxy"] = self.proxy
+        connection_pool_kw["_proxy_headers"] = self.proxy_headers
+        connection_pool_kw["_proxy_config"] = self.proxy_config
+
+        super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
+
+    def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+        if scheme == "https":
+            return super(ProxyManager, self).connection_from_host(
+                host, port, scheme, pool_kwargs=pool_kwargs
+            )
+
+        return super(ProxyManager, self).connection_from_host(
+            self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs
+        )
+
+    def _set_proxy_headers(self, url, headers=None):
+        """
+        Sets headers needed by proxies: specifically, the Accept and Host
+        headers. Only sets headers not provided by the user.
+        """
+        headers_ = {"Accept": "*/*"}
+
+        netloc = parse_url(url).netloc
+        if netloc:
+            headers_["Host"] = netloc
+
+        if headers:
+            headers_.update(headers)
+        return headers_
+
+    def urlopen(self, method, url, redirect=True, **kw):
+        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
+        u = parse_url(url)
+        if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
+            # For connections using HTTP CONNECT, httplib sets the necessary
+            # headers on the CONNECT to the proxy. If we're not using CONNECT,
+            # we'll definitely need to set 'Host' at the very least.
+            headers = kw.get("headers", self.headers)
+            kw["headers"] = self._set_proxy_headers(url, headers)
+
+        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
+
+
+def proxy_from_url(url, **kw):
+    return ProxyManager(proxy_url=url, **kw)
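+
+
+# Example (not upstream code, a hedged sketch): proxy_from_url is a thin
+# convenience wrapper around ProxyManager. The proxy URL below is an
+# assumption for illustration.
+#
+#     >>> import urllib3
+#     >>> proxy = urllib3.proxy_from_url("http://localhost:3128/")
+#     >>> isinstance(proxy, urllib3.ProxyManager)
+#     True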
diff --git a/venv/lib/python3.7/site-packages/urllib3/request.py b/venv/lib/python3.7/site-packages/urllib3/request.py
new file mode 100644
index 00000000..398386a5
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/request.py
@@ -0,0 +1,170 @@
+from __future__ import absolute_import
+
+from .filepost import encode_multipart_formdata
+from .packages.six.moves.urllib.parse import urlencode
+
+__all__ = ["RequestMethods"]
+
+
+class RequestMethods(object):
+    """
+    Convenience mixin for classes that implement a :meth:`urlopen` method, such
+    as :class:`urllib3.HTTPConnectionPool` and
+    :class:`urllib3.PoolManager`.
+
+    Provides behavior for making common types of HTTP requests and decides
+    which type of request field encoding to use.
+
+    Specifically,
+
+    :meth:`.request_encode_url` is for sending requests whose fields are
+    encoded in the URL (such as GET, HEAD, DELETE).
+
+    :meth:`.request_encode_body` is for sending requests whose fields are
+    encoded in the *body* of the request using multipart or www-form-urlencoded
+    (such as for POST, PUT, PATCH).
+
+    :meth:`.request` is for making any kind of request; it will look up the
+    appropriate encoding format and use one of the above two methods to make
+    the request.
+
+    Initializer parameters:
+
+    :param headers:
+        Headers to include with all requests, unless other headers are given
+        explicitly.
+    """
+
+    _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
+
+    def __init__(self, headers=None):
+        self.headers = headers or {}
+
+    def urlopen(
+        self,
+        method,
+        url,
+        body=None,
+        headers=None,
+        encode_multipart=True,
+        multipart_boundary=None,
+        **kw
+    ):  # Abstract
+        raise NotImplementedError(
+            "Classes extending RequestMethods must implement "
+            "their own ``urlopen`` method."
+        )
+
+    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
+        """
+        Make a request using :meth:`urlopen` with the appropriate encoding of
+        ``fields`` based on the ``method`` used.
+
+        This is a convenience method that requires the least amount of manual
+        effort. It can be used in most situations, while still having the
+        option to drop down to more specific methods when necessary, such as
+        :meth:`request_encode_url`, :meth:`request_encode_body`,
+        or even the lowest level :meth:`urlopen`.
+        """
+        method = method.upper()
+
+        urlopen_kw["request_url"] = url
+
+        if method in self._encode_url_methods:
+            return self.request_encode_url(
+                method, url, fields=fields, headers=headers, **urlopen_kw
+            )
+        else:
+            return self.request_encode_body(
+                method, url, fields=fields, headers=headers, **urlopen_kw
+            )
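+    # Example (not upstream code, a hedged sketch): GET-style fields are
+    # encoded into the URL, POST-style fields into the body. The host is
+    # illustrative only.
+    #
+    #     >>> import urllib3
+    #     >>> http = urllib3.PoolManager()
+    #     >>> r = http.request("GET", "http://httpbin.org/get", fields={"q": "x"})
+    #     >>> # sends GET http://httpbin.org/get?q=x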
+
+    def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
+        """
+        Make a request using :meth:`urlopen` with the ``fields`` encoded in
+        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
+        """
+        if headers is None:
+            headers = self.headers
+
+        extra_kw = {"headers": headers}
+        extra_kw.update(urlopen_kw)
+
+        if fields:
+            url += "?" + urlencode(fields)
+
+        return self.urlopen(method, url, **extra_kw)
+
+    def request_encode_body(
+        self,
+        method,
+        url,
+        fields=None,
+        headers=None,
+        encode_multipart=True,
+        multipart_boundary=None,
+        **urlopen_kw
+    ):
+        """
+        Make a request using :meth:`urlopen` with the ``fields`` encoded in
+        the body. This is useful for request methods like POST, PUT, PATCH, etc.
+
+        When ``encode_multipart=True`` (default), then
+        :func:`urllib3.encode_multipart_formdata` is used to encode
+        the payload with the appropriate content type. Otherwise
+        :func:`urllib.parse.urlencode` is used with the
+        'application/x-www-form-urlencoded' content type.
+
+        Multipart encoding must be used when posting files, and it's reasonably
+        safe to use it at other times too. However, it may break request
+        signing, such as with OAuth.
+
+        Supports an optional ``fields`` parameter of key/value strings AND
+        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
+        the MIME type is optional. For example::
+
+            fields = {
+                'foo': 'bar',
+                'fakefile': ('foofile.txt', 'contents of foofile'),
+                'realfile': ('barfile.txt', open('realfile').read()),
+                'typedfile': ('bazfile.bin', open('bazfile').read(),
+                              'image/jpeg'),
+                'nonamefile': 'contents of nonamefile field',
+            }
+
+        When uploading a file, providing a filename (the first parameter of the
+        tuple) is optional but recommended to best mimic behavior of browsers.
+
+        Note that if ``headers`` are supplied, the 'Content-Type' header will
+        be overwritten because it depends on the dynamic random boundary string
+        which is used to compose the body of the request. The random boundary
+        string can be explicitly set with the ``multipart_boundary`` parameter.
+        """
+        if headers is None:
+            headers = self.headers
+
+        extra_kw = {"headers": {}}
+
+        if fields:
+            if "body" in urlopen_kw:
+                raise TypeError(
+                    "request got values for both 'fields' and 'body', can only specify one."
+                )
+
+            if encode_multipart:
+                body, content_type = encode_multipart_formdata(
+                    fields, boundary=multipart_boundary
+                )
+            else:
+                body, content_type = (
+                    urlencode(fields),
+                    "application/x-www-form-urlencoded",
+                )
+
+            extra_kw["body"] = body
+            extra_kw["headers"] = {"Content-Type": content_type}
+
+        extra_kw["headers"].update(headers)
+        extra_kw.update(urlopen_kw)
+
+        return self.urlopen(method, url, **extra_kw)
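+
+
+# Example (not upstream code, a hedged sketch): posting multipart form
+# data through request(); the URL and field names are assumptions.
+#
+#     >>> import urllib3
+#     >>> http = urllib3.PoolManager()
+#     >>> r = http.request(
+#     ...     "POST",
+#     ...     "http://httpbin.org/post",
+#     ...     fields={"upload": ("hello.txt", b"hello world", "text/plain")},
+#     ... )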
diff --git a/venv/lib/python3.7/site-packages/urllib3/response.py b/venv/lib/python3.7/site-packages/urllib3/response.py
new file mode 100644
index 00000000..38693f4f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/response.py
@@ -0,0 +1,821 @@
+from __future__ import absolute_import
+
+import io
+import logging
+import zlib
+from contextlib import contextmanager
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+try:
+    import brotli
+except ImportError:
+    brotli = None
+
+from ._collections import HTTPHeaderDict
+from .connection import BaseSSLError, HTTPException
+from .exceptions import (
+    BodyNotHttplibCompatible,
+    DecodeError,
+    HTTPError,
+    IncompleteRead,
+    InvalidChunkLength,
+    InvalidHeader,
+    ProtocolError,
+    ReadTimeoutError,
+    ResponseNotChunked,
+    SSLError,
+)
+from .packages import six
+from .util.response import is_fp_closed, is_response_to_head
+
+log = logging.getLogger(__name__)
+
+
+class DeflateDecoder(object):
+    def __init__(self):
+        self._first_try = True
+        self._data = b""
+        self._obj = zlib.decompressobj()
+
+    def __getattr__(self, name):
+        return getattr(self._obj, name)
+
+    def decompress(self, data):
+        if not data:
+            return data
+
+        if not self._first_try:
+            return self._obj.decompress(data)
+
+        self._data += data
+        try:
+            decompressed = self._obj.decompress(data)
+            if decompressed:
+                self._first_try = False
+                self._data = None
+            return decompressed
+        except zlib.error:
+            self._first_try = False
+            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
+            try:
+                return self.decompress(self._data)
+            finally:
+                self._data = None
+
+
+class GzipDecoderState(object):
+
+    FIRST_MEMBER = 0
+    OTHER_MEMBERS = 1
+    SWALLOW_DATA = 2
+
+
+class GzipDecoder(object):
+    def __init__(self):
+        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+        self._state = GzipDecoderState.FIRST_MEMBER
+
+    def __getattr__(self, name):
+        return getattr(self._obj, name)
+
+    def decompress(self, data):
+        ret = bytearray()
+        if self._state == GzipDecoderState.SWALLOW_DATA or not data:
+            return bytes(ret)
+        while True:
+            try:
+                ret += self._obj.decompress(data)
+            except zlib.error:
+                previous_state = self._state
+                # Ignore data after the first error
+                self._state = GzipDecoderState.SWALLOW_DATA
+                if previous_state == GzipDecoderState.OTHER_MEMBERS:
+                    # Allow trailing garbage acceptable in other gzip clients
+                    return bytes(ret)
+                raise
+            data = self._obj.unused_data
+            if not data:
+                return bytes(ret)
+            self._state = GzipDecoderState.OTHER_MEMBERS
+            self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+
+
+if brotli is not None:
+
+    class BrotliDecoder(object):
+        # Supports both 'brotlipy' and 'Brotli' packages
+        # since they share an import name. The top branches
+        # are for 'brotlipy' and bottom branches for 'Brotli'
+        def __init__(self):
+            self._obj = brotli.Decompressor()
+            if hasattr(self._obj, "decompress"):
+                self.decompress = self._obj.decompress
+            else:
+                self.decompress = self._obj.process
+
+        def flush(self):
+            if hasattr(self._obj, "flush"):
+                return self._obj.flush()
+            return b""
+
+
+class MultiDecoder(object):
+    """
+    From RFC7231:
+        If one or more encodings have been applied to a representation, the
+        sender that applied the encodings MUST generate a Content-Encoding
+        header field that lists the content codings in the order in which
+        they were applied.
+    """
+
+    def __init__(self, modes):
+        self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
+
+    def flush(self):
+        return self._decoders[0].flush()
+
+    def decompress(self, data):
+        for d in reversed(self._decoders):
+            data = d.decompress(data)
+        return data
+
+
+def _get_decoder(mode):
+    if "," in mode:
+        return MultiDecoder(mode)
+
+    if mode == "gzip":
+        return GzipDecoder()
+
+    if brotli is not None and mode == "br":
+        return BrotliDecoder()
+
+    return DeflateDecoder()
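+
+
+# Example (not upstream code, a hedged sketch): _get_decoder dispatches
+# on the Content-Encoding value; a comma-separated value produces a
+# MultiDecoder that unwraps codings in reverse order of application.
+#
+#     >>> import gzip, zlib
+#     >>> _get_decoder("gzip").decompress(gzip.compress(b"payload"))
+#     b'payload'
+#     >>> _get_decoder("deflate").decompress(zlib.compress(b"payload"))
+#     b'payload'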
+
+
+class HTTPResponse(io.IOBase):
+    """
+    HTTP Response container.
+
+    Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
+    loaded and decoded on-demand when the ``data`` property is accessed.  This
+    class is also compatible with the Python standard library's :mod:`io`
+    module, and can hence be treated as a readable object in the context of that
+    framework.
+
+    Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
+
+    :param preload_content:
+        If True, the response's body will be preloaded during construction.
+
+    :param decode_content:
+        If True, will attempt to decode the body based on the
+        'content-encoding' header.
+
+    :param original_response:
+        When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
+        object, it's convenient to include the original for debug purposes. It's
+        otherwise unused.
+
+    :param retries:
+        The retries contains the last :class:`~urllib3.util.retry.Retry` that
+        was used during the request.
+
+    :param enforce_content_length:
+        Enforce content length checking. The body returned by the server must
+        match the value of the Content-Length header, if present; otherwise an
+        error is raised.
+    """
+
+    CONTENT_DECODERS = ["gzip", "deflate"]
+    if brotli is not None:
+        CONTENT_DECODERS += ["br"]
+    REDIRECT_STATUSES = [301, 302, 303, 307, 308]
+
+    def __init__(
+        self,
+        body="",
+        headers=None,
+        status=0,
+        version=0,
+        reason=None,
+        strict=0,
+        preload_content=True,
+        decode_content=True,
+        original_response=None,
+        pool=None,
+        connection=None,
+        msg=None,
+        retries=None,
+        enforce_content_length=False,
+        request_method=None,
+        request_url=None,
+        auto_close=True,
+    ):
+
+        if isinstance(headers, HTTPHeaderDict):
+            self.headers = headers
+        else:
+            self.headers = HTTPHeaderDict(headers)
+        self.status = status
+        self.version = version
+        self.reason = reason
+        self.strict = strict
+        self.decode_content = decode_content
+        self.retries = retries
+        self.enforce_content_length = enforce_content_length
+        self.auto_close = auto_close
+
+        self._decoder = None
+        self._body = None
+        self._fp = None
+        self._original_response = original_response
+        self._fp_bytes_read = 0
+        self.msg = msg
+        self._request_url = request_url
+
+        if body and isinstance(body, (six.string_types, bytes)):
+            self._body = body
+
+        self._pool = pool
+        self._connection = connection
+
+        if hasattr(body, "read"):
+            self._fp = body
+
+        # Are we using the chunked-style of transfer encoding?
+        self.chunked = False
+        self.chunk_left = None
+        tr_enc = self.headers.get("transfer-encoding", "").lower()
+        # Don't incur the penalty of creating a list and then discarding it
+        encodings = (enc.strip() for enc in tr_enc.split(","))
+        if "chunked" in encodings:
+            self.chunked = True
+
+        # Determine length of response
+        self.length_remaining = self._init_length(request_method)
+
+        # If requested, preload the body.
+        if preload_content and not self._body:
+            self._body = self.read(decode_content=decode_content)
+
+    def get_redirect_location(self):
+        """
+        Should we redirect and where to?
+
+        :returns: Truthy redirect location string if we got a redirect status
+            code and valid location. ``None`` if redirect status and no
+            location. ``False`` if not a redirect status code.
+        """
+        if self.status in self.REDIRECT_STATUSES:
+            return self.headers.get("location")
+
+        return False
+
+    def release_conn(self):
+        if not self._pool or not self._connection:
+            return
+
+        self._pool._put_conn(self._connection)
+        self._connection = None
+
+    def drain_conn(self):
+        """
+        Read and discard any remaining HTTP response data in the response connection.
+
+        Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
+        """
+        try:
+            self.read()
+        except (HTTPError, SocketError, BaseSSLError, HTTPException):
+            pass
+
+    @property
+    def data(self):
+        # For backwards-compat with urllib3 0.4 and earlier.
+        if self._body:
+            return self._body
+
+        if self._fp:
+            return self.read(cache_content=True)
+
+    @property
+    def connection(self):
+        return self._connection
+
+    def isclosed(self):
+        return is_fp_closed(self._fp)
+
+    def tell(self):
+        """
+        Obtain the number of bytes pulled over the wire so far. May differ from
+        the amount of content returned by :meth:`urllib3.response.HTTPResponse.read`
+        if bytes are encoded on the wire (e.g, compressed).
+        """
+        return self._fp_bytes_read
+
+    def _init_length(self, request_method):
+        """
+        Set initial length value for Response content if available.
+        """
+        length = self.headers.get("content-length")
+
+        if length is not None:
+            if self.chunked:
+                # This Response will fail with an IncompleteRead if it can't be
+                # received as chunked. This method falls back to attempt reading
+                # the response before raising an exception.
+                log.warning(
+                    "Received response with both Content-Length and "
+                    "Transfer-Encoding set. This is expressly forbidden "
+                    "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
+                    "attempting to process response as Transfer-Encoding: "
+                    "chunked."
+                )
+                return None
+
+            try:
+                # RFC 7230 section 3.3.2 specifies multiple content lengths can
+                # be sent in a single Content-Length header
+                # (e.g. Content-Length: 42, 42). This line ensures the values
+                # are all valid ints and that as long as the `set` length is 1,
+                # all values are the same. Otherwise, the header is invalid.
+                lengths = set([int(val) for val in length.split(",")])
+                if len(lengths) > 1:
+                    raise InvalidHeader(
+                        "Content-Length contained multiple "
+                        "unmatching values (%s)" % length
+                    )
+                length = lengths.pop()
+            except ValueError:
+                length = None
+            else:
+                if length < 0:
+                    length = None
+
+        # Convert status to int for comparison
+        # In some cases, httplib returns a status of "_UNKNOWN"
+        try:
+            status = int(self.status)
+        except ValueError:
+            status = 0
+
+        # Check for responses that shouldn't include a body
+        if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
+            length = 0
+
+        return length
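+
+    # Example (not upstream code): per the parsing above, a repeated but
+    # matching Content-Length such as "42, 42" collapses to a single
+    # value, while mismatched values raise InvalidHeader.
+    #
+    #     >>> set(int(val) for val in "42, 42".split(","))
+    #     {42}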
+
+    def _init_decoder(self):
+        """
+        Set-up the _decoder attribute if necessary.
+        """
+        # Note: content-encoding value should be case-insensitive, per RFC 7230
+        # Section 3.2
+        content_encoding = self.headers.get("content-encoding", "").lower()
+        if self._decoder is None:
+            if content_encoding in self.CONTENT_DECODERS:
+                self._decoder = _get_decoder(content_encoding)
+            elif "," in content_encoding:
+                encodings = [
+                    e.strip()
+                    for e in content_encoding.split(",")
+                    if e.strip() in self.CONTENT_DECODERS
+                ]
+                if len(encodings):
+                    self._decoder = _get_decoder(content_encoding)
+
+    DECODER_ERROR_CLASSES = (IOError, zlib.error)
+    if brotli is not None:
+        DECODER_ERROR_CLASSES += (brotli.error,)
+
+    def _decode(self, data, decode_content, flush_decoder):
+        """
+        Decode the data passed in and potentially flush the decoder.
+        """
+        if not decode_content:
+            return data
+
+        try:
+            if self._decoder:
+                data = self._decoder.decompress(data)
+        except self.DECODER_ERROR_CLASSES as e:
+            content_encoding = self.headers.get("content-encoding", "").lower()
+            raise DecodeError(
+                "Received response with content-encoding: %s, but "
+                "failed to decode it." % content_encoding,
+                e,
+            )
+        if flush_decoder:
+            data += self._flush_decoder()
+
+        return data
+
+    def _flush_decoder(self):
+        """
+        Flushes the decoder. Should only be called if the decoder is actually
+        being used.
+        """
+        if self._decoder:
+            buf = self._decoder.decompress(b"")
+            return buf + self._decoder.flush()
+
+        return b""
+
+    @contextmanager
+    def _error_catcher(self):
+        """
+        Catch low-level python exceptions, instead re-raising urllib3
+        variants, so that low-level exceptions are not leaked in the
+        high-level api.
+
+        On exit, release the connection back to the pool.
+        """
+        clean_exit = False
+
+        try:
+            try:
+                yield
+
+            except SocketTimeout:
+                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
+                # there is yet no clean way to get at it from this context.
+                raise ReadTimeoutError(self._pool, None, "Read timed out.")
+
+            except BaseSSLError as e:
+                # FIXME: Is there a better way to differentiate between SSLErrors?
+                if "read operation timed out" not in str(e):
+                    # SSL errors related to framing/MAC get wrapped and reraised here
+                    raise SSLError(e)
+
+                raise ReadTimeoutError(self._pool, None, "Read timed out.")
+
+            except (HTTPException, SocketError) as e:
+                # This includes IncompleteRead.
+                raise ProtocolError("Connection broken: %r" % e, e)
+
+            # If no exception is thrown, we should avoid cleaning up
+            # unnecessarily.
+            clean_exit = True
+        finally:
+            # If we didn't terminate cleanly, we need to throw away our
+            # connection.
+            if not clean_exit:
+                # The response may not be closed but we're not going to use it
+                # anymore so close it now to ensure that the connection is
+                # released back to the pool.
+                if self._original_response:
+                    self._original_response.close()
+
+                # Closing the response may not actually be sufficient to close
+                # everything, so if we have a hold of the connection close that
+                # too.
+                if self._connection:
+                    self._connection.close()
+
+            # If we hold the original response but it's closed now, we should
+            # return the connection back to the pool.
+            if self._original_response and self._original_response.isclosed():
+                self.release_conn()
+
+    def read(self, amt=None, decode_content=None, cache_content=False):
+        """
+        Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
+        parameters: ``decode_content`` and ``cache_content``.
+
+        :param amt:
+            How much of the content to read. If specified, caching is skipped
+            because it doesn't make sense to cache partial content as the full
+            response.
+
+        :param decode_content:
+            If True, will attempt to decode the body based on the
+            'content-encoding' header.
+
+        :param cache_content:
+            If True, will save the returned data such that the same result is
+            returned regardless of the state of the underlying file object. This
+            is useful if you want the ``.data`` property to continue working
+            after having ``.read()`` the file object. (Overridden if ``amt`` is
+            set.)
+        """
+        self._init_decoder()
+        if decode_content is None:
+            decode_content = self.decode_content
+
+        if self._fp is None:
+            return
+
+        flush_decoder = False
+        fp_closed = getattr(self._fp, "closed", False)
+
+        with self._error_catcher():
+            if amt is None:
+                # cStringIO doesn't like amt=None
+                data = self._fp.read() if not fp_closed else b""
+                flush_decoder = True
+            else:
+                cache_content = False
+                data = self._fp.read(amt) if not fp_closed else b""
+                if (
+                    amt != 0 and not data
+                ):  # Platform-specific: Buggy versions of Python.
+                    # Close the connection when no data is returned
+                    #
+                    # This is redundant to what httplib/http.client _should_
+                    # already do.  However, versions of python released before
+                    # December 15, 2012 (http://bugs.python.org/issue16298) do
+                    # not properly close the connection in all cases. There is
+                    # no harm in redundantly calling close.
+                    self._fp.close()
+                    flush_decoder = True
+                    if self.enforce_content_length and self.length_remaining not in (
+                        0,
+                        None,
+                    ):
+                        # This is an edge case that httplib failed to cover due
+                        # to concerns of backward compatibility. We're
+                        # addressing it here to make sure IncompleteRead is
+                        # raised during streaming, so all calls with incorrect
+                        # Content-Length are caught.
+                        raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
+
+        if data:
+            self._fp_bytes_read += len(data)
+            if self.length_remaining is not None:
+                self.length_remaining -= len(data)
+
+            data = self._decode(data, decode_content, flush_decoder)
+
+            if cache_content:
+                self._body = data
+
+        return data
+
+    def stream(self, amt=2 ** 16, decode_content=None):
+        """
+        A generator wrapper for the read() method. A call will block until
+        ``amt`` bytes have been read from the connection or until the
+        connection is closed.
+
+        :param amt:
+            How much of the content to read. The generator will return at most
+            this much data per iteration, but may return less. This is
+            particularly likely when using compressed data. However, the empty
+            string will never be returned.
+
+        :param decode_content:
+            If True, will attempt to decode the body based on the
+            'content-encoding' header.
+        """
+        if self.chunked and self.supports_chunked_reads():
+            for line in self.read_chunked(amt, decode_content=decode_content):
+                yield line
+        else:
+            while not is_fp_closed(self._fp):
+                data = self.read(amt=amt, decode_content=decode_content)
+
+                if data:
+                    yield data
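+
+    # Example (not upstream code, a hedged sketch): streaming a response
+    # in 1 KiB chunks with preload_content=False; the URL is illustrative.
+    #
+    #     >>> import urllib3
+    #     >>> http = urllib3.PoolManager()
+    #     >>> r = http.request("GET", "http://httpbin.org/bytes/4096",
+    #     ...                  preload_content=False)
+    #     >>> total = sum(len(chunk) for chunk in r.stream(1024))
+    #     >>> r.release_conn()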
+
+    @classmethod
+    def from_httplib(ResponseCls, r, **response_kw):
+        """
+        Given an :class:`http.client.HTTPResponse` instance ``r``, return a
+        corresponding :class:`urllib3.response.HTTPResponse` object.
+
+        Remaining parameters are passed to the HTTPResponse constructor, along
+        with ``original_response=r``.
+        """
+        headers = r.msg
+
+        if not isinstance(headers, HTTPHeaderDict):
+            if six.PY2:
+                # Python 2.7
+                headers = HTTPHeaderDict.from_httplib(headers)
+            else:
+                headers = HTTPHeaderDict(headers.items())
+
+        # HTTPResponse objects in Python 3 don't have a .strict attribute
+        strict = getattr(r, "strict", 0)
+        resp = ResponseCls(
+            body=r,
+            headers=headers,
+            status=r.status,
+            version=r.version,
+            reason=r.reason,
+            strict=strict,
+            original_response=r,
+            **response_kw
+        )
+        return resp
+
+    # Backwards-compatibility methods for http.client.HTTPResponse
+    def getheaders(self):
+        return self.headers
+
+    def getheader(self, name, default=None):
+        return self.headers.get(name, default)
+
+    # Backwards compatibility for http.cookiejar
+    def info(self):
+        return self.headers
+
+    # Overrides from io.IOBase
+    def close(self):
+        if not self.closed:
+            self._fp.close()
+
+        if self._connection:
+            self._connection.close()
+
+        if not self.auto_close:
+            io.IOBase.close(self)
+
+    @property
+    def closed(self):
+        if not self.auto_close:
+            return io.IOBase.closed.__get__(self)
+        elif self._fp is None:
+            return True
+        elif hasattr(self._fp, "isclosed"):
+            return self._fp.isclosed()
+        elif hasattr(self._fp, "closed"):
+            return self._fp.closed
+        else:
+            return True
+
+    def fileno(self):
+        if self._fp is None:
+            raise IOError("HTTPResponse has no file to get a fileno from")
+        elif hasattr(self._fp, "fileno"):
+            return self._fp.fileno()
+        else:
+            raise IOError(
+                "The file-like object this HTTPResponse is wrapped "
+                "around has no file descriptor"
+            )
+
+    def flush(self):
+        if (
+            self._fp is not None
+            and hasattr(self._fp, "flush")
+            and not getattr(self._fp, "closed", False)
+        ):
+            return self._fp.flush()
+
+    def readable(self):
+        # This method is required for `io` module compatibility.
+        return True
+
+    def readinto(self, b):
+        # This method is required for `io` module compatibility.
+        temp = self.read(len(b))
+        if len(temp) == 0:
+            return 0
+        else:
+            b[: len(temp)] = temp
+            return len(temp)
+
+    def supports_chunked_reads(self):
+        """
+        Checks if the underlying file-like object looks like a
+        :class:`http.client.HTTPResponse` object. We do this by testing for
+        the fp attribute. If it is present we assume it returns raw chunks as
+        processed by read_chunked().
+        """
+        return hasattr(self._fp, "fp")
+
+    def _update_chunk_length(self):
+        # First, we'll figure out the length of a chunk and then
+        # we'll try to read it from socket.
+        if self.chunk_left is not None:
+            return
+        line = self._fp.fp.readline()
+        line = line.split(b";", 1)[0]
+        try:
+            self.chunk_left = int(line, 16)
+        except ValueError:
+            # Invalid chunked protocol response, abort.
+            self.close()
+            raise InvalidChunkLength(self, line)
+
+    def _handle_chunk(self, amt):
+        returned_chunk = None
+        if amt is None:
+            chunk = self._fp._safe_read(self.chunk_left)
+            returned_chunk = chunk
+            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
+            self.chunk_left = None
+        elif amt < self.chunk_left:
+            value = self._fp._safe_read(amt)
+            self.chunk_left = self.chunk_left - amt
+            returned_chunk = value
+        elif amt == self.chunk_left:
+            value = self._fp._safe_read(amt)
+            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
+            self.chunk_left = None
+            returned_chunk = value
+        else:  # amt > self.chunk_left
+            returned_chunk = self._fp._safe_read(self.chunk_left)
+            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
+            self.chunk_left = None
+        return returned_chunk
+
+    def read_chunked(self, amt=None, decode_content=None):
+        """
+        Similar to :meth:`HTTPResponse.read`, but with an additional
+        parameter: ``decode_content``.
+
+        :param amt:
+            How much of the content to read. If specified, caching is skipped
+            because it doesn't make sense to cache partial content as the full
+            response.
+
+        :param decode_content:
+            If True, will attempt to decode the body based on the
+            'content-encoding' header.
+        """
+        self._init_decoder()
+        # FIXME: Rewrite this method and make it a class with a better structured logic.
+        if not self.chunked:
+            raise ResponseNotChunked(
+                "Response is not chunked. "
+                "Header 'transfer-encoding: chunked' is missing."
+            )
+        if not self.supports_chunked_reads():
+            raise BodyNotHttplibCompatible(
+                "Body should be http.client.HTTPResponse like. "
+                "It should have have an fp attribute which returns raw chunks."
+            )
+
+        with self._error_catcher():
+            # Don't bother reading the body of a HEAD request.
+            if self._original_response and is_response_to_head(self._original_response):
+                self._original_response.close()
+                return
+
+            # If a response is already read and closed
+            # then return immediately.
+            if self._fp.fp is None:
+                return
+
+            while True:
+                self._update_chunk_length()
+                if self.chunk_left == 0:
+                    break
+                chunk = self._handle_chunk(amt)
+                decoded = self._decode(
+                    chunk, decode_content=decode_content, flush_decoder=False
+                )
+                if decoded:
+                    yield decoded
+
+            if decode_content:
+                # On CPython and PyPy, we should never need to flush the
+                # decoder. However, on Jython we *might* need to, so
+                # let's defensively do it anyway.
+                decoded = self._flush_decoder()
+                if decoded:  # Platform-specific: Jython.
+                    yield decoded
+
+            # Chunk content ends with \r\n: discard it.
+            while True:
+                line = self._fp.fp.readline()
+                if not line:
+                    # Some sites may not end with '\r\n'.
+                    break
+                if line == b"\r\n":
+                    break
+
+            # We read everything; close the "file".
+            if self._original_response:
+                self._original_response.close()
+
+    def geturl(self):
+        """
+        Returns the URL that was the source of this response.
+        If the request that generated this response redirected, this method
+        will return the final redirect location.
+        """
+        if self.retries is not None and len(self.retries.history):
+            return self.retries.history[-1].redirect_location
+        else:
+            return self._request_url
+
+    def __iter__(self):
+        buffer = []
+        for chunk in self.stream(decode_content=True):
+            if b"\n" in chunk:
+                chunk = chunk.split(b"\n")
+                yield b"".join(buffer) + chunk[0] + b"\n"
+                for x in chunk[1:-1]:
+                    yield x + b"\n"
+                if chunk[-1]:
+                    buffer = [chunk[-1]]
+                else:
+                    buffer = []
+            else:
+                buffer.append(chunk)
+        if buffer:
+            yield b"".join(buffer)
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/__init__.py b/venv/lib/python3.7/site-packages/urllib3/util/__init__.py
new file mode 100644
index 00000000..4547fc52
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/__init__.py
@@ -0,0 +1,49 @@
+from __future__ import absolute_import
+
+# For backwards compatibility, provide imports that used to be here.
+from .connection import is_connection_dropped
+from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
+from .response import is_fp_closed
+from .retry import Retry
+from .ssl_ import (
+    ALPN_PROTOCOLS,
+    HAS_SNI,
+    IS_PYOPENSSL,
+    IS_SECURETRANSPORT,
+    PROTOCOL_TLS,
+    SSLContext,
+    assert_fingerprint,
+    resolve_cert_reqs,
+    resolve_ssl_version,
+    ssl_wrap_socket,
+)
+from .timeout import Timeout, current_time
+from .url import Url, get_host, parse_url, split_first
+from .wait import wait_for_read, wait_for_write
+
+__all__ = (
+    "HAS_SNI",
+    "IS_PYOPENSSL",
+    "IS_SECURETRANSPORT",
+    "SSLContext",
+    "PROTOCOL_TLS",
+    "ALPN_PROTOCOLS",
+    "Retry",
+    "Timeout",
+    "Url",
+    "assert_fingerprint",
+    "current_time",
+    "is_connection_dropped",
+    "is_fp_closed",
+    "get_host",
+    "parse_url",
+    "make_headers",
+    "resolve_cert_reqs",
+    "resolve_ssl_version",
+    "split_first",
+    "ssl_wrap_socket",
+    "wait_for_read",
+    "wait_for_write",
+    "SKIP_HEADER",
+    "SKIPPABLE_HEADERS",
+)
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/connection.py b/venv/lib/python3.7/site-packages/urllib3/util/connection.py
new file mode 100644
index 00000000..cd574557
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/connection.py
@@ -0,0 +1,150 @@
+from __future__ import absolute_import
+
+import socket
+
+from urllib3.exceptions import LocationParseError
+
+from ..contrib import _appengine_environ
+from ..packages import six
+from .wait import NoWayToWaitForSocketError, wait_for_read
+
+
+def is_connection_dropped(conn):  # Platform-specific
+    """
+    Returns True if the connection is dropped and should be closed.
+
+    :param conn:
+        :class:`http.client.HTTPConnection` object.
+
+    Note: For platforms like AppEngine, this will always return ``False`` to
+    let the platform handle connection recycling transparently for us.
+    """
+    sock = getattr(conn, "sock", False)
+    if sock is False:  # Platform-specific: AppEngine
+        return False
+    if sock is None:  # Connection already closed (such as by httplib).
+        return True
+    try:
+        # Returns True if readable, which here means it's been dropped
+        return wait_for_read(sock, timeout=0.0)
+    except NoWayToWaitForSocketError:  # Platform-specific: AppEngine
+        return False
+
+
+# This function is copied from socket.py in the Python 2.7 standard
+# library test suite. Added to its signature is only `socket_options`.
+# One additional modification is that we avoid binding to IPv6 servers
+# discovered in DNS if the system doesn't have IPv6 functionality.
+def create_connection(
+    address,
+    timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+    source_address=None,
+    socket_options=None,
+):
+    """Connect to *address* and return the socket object.
+
+    Convenience function.  Connect to *address* (a 2-tuple ``(host,
+    port)``) and return the socket object.  Passing the optional
+    *timeout* parameter will set the timeout on the socket instance
+    before attempting to connect.  If no *timeout* is supplied, the
+    global default timeout setting returned by :func:`socket.getdefaulttimeout`
+    is used.  If *source_address* is set it must be a tuple of (host, port)
+    for the socket to bind as a source address before making the connection.
+    A host of '' or a port of 0 tells the OS to use the default.
+    """
+
+    host, port = address
+    if host.startswith("["):
+        host = host.strip("[]")
+    err = None
+
+    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
+    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
+    # The original create_connection function always returns all records.
+    family = allowed_gai_family()
+
+    try:
+        host.encode("idna")
+    except UnicodeError:
+        return six.raise_from(
+            LocationParseError(u"'%s', label empty or too long" % host), None
+        )
+
+    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
+        af, socktype, proto, canonname, sa = res
+        sock = None
+        try:
+            sock = socket.socket(af, socktype, proto)
+
+            # If provided, set socket level options before connecting.
+            _set_socket_options(sock, socket_options)
+
+            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+                sock.settimeout(timeout)
+            if source_address:
+                sock.bind(source_address)
+            sock.connect(sa)
+            return sock
+
+        except socket.error as e:
+            err = e
+            if sock is not None:
+                sock.close()
+                sock = None
+
+    if err is not None:
+        raise err
+
+    raise socket.error("getaddrinfo returns an empty list")
+
+
+def _set_socket_options(sock, options):
+    if options is None:
+        return
+
+    for opt in options:
+        sock.setsockopt(*opt)
+
+
+def allowed_gai_family():
+    """This function is designed to work in the context of
+    getaddrinfo, where family=socket.AF_UNSPEC is the default and
+    will perform a DNS search for both IPv6 and IPv4 records."""
+
+    family = socket.AF_INET
+    if HAS_IPV6:
+        family = socket.AF_UNSPEC
+    return family
+
+
+def _has_ipv6(host):
+    """ Returns True if the system can bind an IPv6 address. """
+    sock = None
+    has_ipv6 = False
+
+    # App Engine doesn't support IPV6 sockets and actually has a quota on the
+    # number of sockets that can be used, so just return early here instead of
+    # creating a socket needlessly.
+    # See https://github.com/urllib3/urllib3/issues/1446
+    if _appengine_environ.is_appengine_sandbox():
+        return False
+
+    if socket.has_ipv6:
+        # has_ipv6 returns true if cPython was compiled with IPv6 support.
+        # It does not tell us if the system has IPv6 support enabled. To
+        # determine that we must bind to an IPv6 address.
+        # https://github.com/urllib3/urllib3/pull/611
+        # https://bugs.python.org/issue658327
+        try:
+            sock = socket.socket(socket.AF_INET6)
+            sock.bind((host, 0))
+            has_ipv6 = True
+        except Exception:
+            pass
+
+    if sock:
+        sock.close()
+    return has_ipv6
+
+
+HAS_IPV6 = _has_ipv6("::1")
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/proxy.py b/venv/lib/python3.7/site-packages/urllib3/util/proxy.py
new file mode 100644
index 00000000..34f884d5
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/proxy.py
@@ -0,0 +1,56 @@
+from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
+
+
+def connection_requires_http_tunnel(
+    proxy_url=None, proxy_config=None, destination_scheme=None
+):
+    """
+    Returns True if the connection requires an HTTP CONNECT through the proxy.
+
+    :param URL proxy_url:
+        URL of the proxy.
+    :param ProxyConfig proxy_config:
+        Proxy configuration from poolmanager.py
+    :param str destination_scheme:
+        The scheme of the destination (e.g. "https" or "http").
+    """
+    # If we're not using a proxy, no way to use a tunnel.
+    if proxy_url is None:
+        return False
+
+    # HTTP destinations never require tunneling, we always forward.
+    if destination_scheme == "http":
+        return False
+
+    # Support for forwarding with HTTPS proxies and HTTPS destinations.
+    if (
+        proxy_url.scheme == "https"
+        and proxy_config
+        and proxy_config.use_forwarding_for_https
+    ):
+        return False
+
+    # Otherwise always use a tunnel.
+    return True
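+
+
+# Example (not upstream code): a quick truth table for the helper above.
+#
+#     >>> from urllib3.util.url import parse_url
+#     >>> connection_requires_http_tunnel(parse_url("http://proxy:3128"), None, "http")
+#     False
+#     >>> connection_requires_http_tunnel(parse_url("http://proxy:3128"), None, "https")
+#     True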
+
+
+def create_proxy_ssl_context(
+    ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
+):
+    """
+    Generates a default proxy ssl context if one hasn't been provided by the
+    user.
+    """
+    ssl_context = create_urllib3_context(
+        ssl_version=resolve_ssl_version(ssl_version),
+        cert_reqs=resolve_cert_reqs(cert_reqs),
+    )
+    if (
+        not ca_certs
+        and not ca_cert_dir
+        and not ca_cert_data
+        and hasattr(ssl_context, "load_default_certs")
+    ):
+        ssl_context.load_default_certs()
+
+    return ssl_context
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/queue.py b/venv/lib/python3.7/site-packages/urllib3/util/queue.py
new file mode 100644
index 00000000..41784104
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/queue.py
@@ -0,0 +1,22 @@
+import collections
+
+from ..packages import six
+from ..packages.six.moves import queue
+
+if six.PY2:
+    # Queue is imported for side effects on MS Windows. See issue #229.
+    import Queue as _unused_module_Queue  # noqa: F401
+
+
+class LifoQueue(queue.Queue):
+    def _init(self, _):
+        self.queue = collections.deque()
+
+    def _qsize(self, len=len):
+        return len(self.queue)
+
+    def _put(self, item):
+        self.queue.append(item)
+
+    def _get(self):
+        return self.queue.pop()
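+
+
+# Example (not upstream code): the queue is last-in, first-out, which the
+# connection pool uses to prefer the most recently used connection.
+#
+#     >>> q = LifoQueue()
+#     >>> q.put(1); q.put(2)
+#     >>> q.get()
+#     2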
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/request.py b/venv/lib/python3.7/site-packages/urllib3/util/request.py
new file mode 100644
index 00000000..25103383
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/request.py
@@ -0,0 +1,143 @@
+from __future__ import absolute_import
+
+from base64 import b64encode
+
+from ..exceptions import UnrewindableBodyError
+from ..packages.six import b, integer_types
+
+# Pass as a value within ``headers`` to skip
+# emitting some HTTP headers that are added automatically.
+# The only headers that are supported are ``Accept-Encoding``,
+# ``Host``, and ``User-Agent``.
+SKIP_HEADER = "@@@SKIP_HEADER@@@"
+SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
+
+ACCEPT_ENCODING = "gzip,deflate"
+try:
+    import brotli as _unused_module_brotli  # noqa: F401
+except ImportError:
+    pass
+else:
+    ACCEPT_ENCODING += ",br"
+
+_FAILEDTELL = object()
+
+
+def make_headers(
+    keep_alive=None,
+    accept_encoding=None,
+    user_agent=None,
+    basic_auth=None,
+    proxy_basic_auth=None,
+    disable_cache=None,
+):
+    """
+    Shortcuts for generating request headers.
+
+    :param keep_alive:
+        If ``True``, adds 'connection: keep-alive' header.
+
+    :param accept_encoding:
+        Can be a boolean, list, or string.
+        ``True`` translates to 'gzip,deflate'.
+        List will get joined by comma.
+        String will be used as provided.
+
+    :param user_agent:
+        String representing the user-agent you want, such as
+        "python-urllib3/0.6"
+
+    :param basic_auth:
+        Colon-separated username:password string for 'authorization: basic ...'
+        auth header.
+
+    :param proxy_basic_auth:
+        Colon-separated username:password string for 'proxy-authorization: basic ...'
+        auth header.
+
+    :param disable_cache:
+        If ``True``, adds 'cache-control: no-cache' header.
+
+    Example::
+
+        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+        >>> make_headers(accept_encoding=True)
+        {'accept-encoding': 'gzip,deflate'}
+    """
+    headers = {}
+    if accept_encoding:
+        if isinstance(accept_encoding, str):
+            pass
+        elif isinstance(accept_encoding, list):
+            accept_encoding = ",".join(accept_encoding)
+        else:
+            accept_encoding = ACCEPT_ENCODING
+        headers["accept-encoding"] = accept_encoding
+
+    if user_agent:
+        headers["user-agent"] = user_agent
+
+    if keep_alive:
+        headers["connection"] = "keep-alive"
+
+    if basic_auth:
+        headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8")
+
+    if proxy_basic_auth:
+        headers["proxy-authorization"] = "Basic " + b64encode(
+            b(proxy_basic_auth)
+        ).decode("utf-8")
+
+    if disable_cache:
+        headers["cache-control"] = "no-cache"
+
+    return headers
+
+
+def set_file_position(body, pos):
+    """
+    If a position is provided, move the file to that point.
+    Otherwise, we'll attempt to record a position for future use.
+    """
+    if pos is not None:
+        rewind_body(body, pos)
+    elif getattr(body, "tell", None) is not None:
+        try:
+            pos = body.tell()
+        except (IOError, OSError):
+            # This differentiates from None, allowing us to catch
+            # a failed `tell()` later when trying to rewind the body.
+            pos = _FAILEDTELL
+
+    return pos
+
+
+def rewind_body(body, body_pos):
+    """
+    Attempt to rewind body to a certain position.
+    Primarily used for request redirects and retries.
+
+    :param body:
+        File-like object that supports seek.
+
+    :param int body_pos:
+        Position to seek to in file.
+    """
+    body_seek = getattr(body, "seek", None)
+    if body_seek is not None and isinstance(body_pos, integer_types):
+        try:
+            body_seek(body_pos)
+        except (IOError, OSError):
+            raise UnrewindableBodyError(
+                "An error occurred when rewinding request body for redirect/retry."
+            )
+    elif body_pos is _FAILEDTELL:
+        raise UnrewindableBodyError(
+            "Unable to record file position for rewinding "
+            "request body during a redirect/retry."
+        )
+    else:
+        raise ValueError(
+            "body_pos must be of type integer, instead it was %s." % type(body_pos)
+        )
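+
+
+# Example (not upstream code, a hedged sketch): recording and rewinding a
+# body position around a retried request, using an in-memory file.
+#
+#     >>> import io
+#     >>> body = io.BytesIO(b"payload")
+#     >>> pos = set_file_position(body, None)
+#     >>> _ = body.read()
+#     >>> rewind_body(body, pos)
+#     >>> body.read()
+#     b'payload'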
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/response.py b/venv/lib/python3.7/site-packages/urllib3/util/response.py
new file mode 100644
index 00000000..5ea609cc
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/response.py
@@ -0,0 +1,107 @@
+from __future__ import absolute_import
+
+from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
+
+from ..exceptions import HeaderParsingError
+from ..packages.six.moves import http_client as httplib
+
+
+def is_fp_closed(obj):
+    """
+    Checks whether a given file-like object is closed.
+
+    :param obj:
+        The file-like object to check.
+    """
+
+    try:
+        # Check `isclosed()` first, in case Python3 doesn't set `closed`.
+        # GH Issue #928
+        return obj.isclosed()
+    except AttributeError:
+        pass
+
+    try:
+        # Check via the official file-like-object way.
+        return obj.closed
+    except AttributeError:
+        pass
+
+    try:
+        # Check if the object is a container for another file-like object that
+        # gets released on exhaustion (e.g. HTTPResponse).
+        return obj.fp is None
+    except AttributeError:
+        pass
+
+    raise ValueError("Unable to determine whether fp is closed.")
+
+
+def assert_header_parsing(headers):
+    """
+    Asserts whether all headers have been successfully parsed.
+    Extracts encountered errors from the result of parsing headers.
+
+    Only works on Python 3.
+
+    :param http.client.HTTPMessage headers: Headers to verify.
+
+    :raises urllib3.exceptions.HeaderParsingError:
+        If parsing errors are found.
+    """
+
+    # This will fail silently if we pass in the wrong kind of parameter.
+    # To make debugging easier add an explicit check.
+    if not isinstance(headers, httplib.HTTPMessage):
+        raise TypeError("expected httplib.Message, got {0}.".format(type(headers)))
+
+    defects = getattr(headers, "defects", None)
+    get_payload = getattr(headers, "get_payload", None)
+
+    unparsed_data = None
+    if get_payload:
+        # get_payload is actually email.message.Message.get_payload;
+        # we're only interested in the result if it's not a multipart message
+        if not headers.is_multipart():
+            payload = get_payload()
+
+            if isinstance(payload, (bytes, str)):
+                unparsed_data = payload
+    if defects:
+        # httplib is assuming a response body is available
+        # when parsing headers even when httplib only sends
+        # header data to parse_headers(). This results in
+        # defects on multipart responses in particular.
+        # See: https://github.com/urllib3/urllib3/issues/800
+
+        # So we ignore the following defects:
+        # - StartBoundaryNotFoundDefect:
+        #     The claimed start boundary was never found.
+        # - MultipartInvariantViolationDefect:
+        #     A message claimed to be a multipart but no subparts were found.
+        defects = [
+            defect
+            for defect in defects
+            if not isinstance(
+                defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
+            )
+        ]
+
+    if defects or unparsed_data:
+        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
+
+
+def is_response_to_head(response):
+    """
+    Checks whether the request of a response has been a HEAD-request.
+    Handles the quirks of AppEngine.
+
+    :param http.client.HTTPResponse response:
+        Response to check if the originating request
+        used 'HEAD' as a method.
+    """
+    # FIXME: Can we do this somehow without accessing private httplib _method?
+    method = response._method
+    if isinstance(method, int):  # Platform-specific: Appengine
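+        # Platform note: App Engine's urlfetch API uses integer method constants; HEAD is 3 there.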
+        return method == 3
+    return method.upper() == "HEAD"
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/retry.py b/venv/lib/python3.7/site-packages/urllib3/util/retry.py
new file mode 100644
index 00000000..d25a41b4
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/retry.py
@@ -0,0 +1,610 @@
+from __future__ import absolute_import
+
+import email
+import logging
+import re
+import time
+import warnings
+from collections import namedtuple
+from itertools import takewhile
+
+from ..exceptions import (
+    ConnectTimeoutError,
+    InvalidHeader,
+    MaxRetryError,
+    ProtocolError,
+    ProxyError,
+    ReadTimeoutError,
+    ResponseError,
+)
+from ..packages import six
+
+log = logging.getLogger(__name__)
+
+
+# Data structure for representing the metadata of requests that result in a retry.
+RequestHistory = namedtuple(
+    "RequestHistory", ["method", "url", "error", "status", "redirect_location"]
+)
+
+
+# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
+_Default = object()
+
+
+class _RetryMeta(type):
+    @property
+    def DEFAULT_METHOD_WHITELIST(cls):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead",
+            DeprecationWarning,
+        )
+        return cls.DEFAULT_ALLOWED_METHODS
+
+    @DEFAULT_METHOD_WHITELIST.setter
+    def DEFAULT_METHOD_WHITELIST(cls, value):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
+            DeprecationWarning,
+        )
+        cls.DEFAULT_ALLOWED_METHODS = value
+
+    @property
+    def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+            DeprecationWarning,
+        )
+        return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
+    @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
+    def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+            DeprecationWarning,
+        )
+        cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
+
+
+@six.add_metaclass(_RetryMeta)
+class Retry(object):
+    """Retry configuration.
+
+    Each retry attempt will create a new Retry object with updated values, so
+    they can be safely reused.
+
+    Retries can be defined as a default for a pool::
+
+        retries = Retry(connect=5, read=2, redirect=5)
+        http = PoolManager(retries=retries)
+        response = http.request('GET', 'http://example.com/')
+
+    Or per-request (which overrides the default for the pool)::
+
+        response = http.request('GET', 'http://example.com/', retries=Retry(10))
+
+    Retries can be disabled by passing ``False``::
+
+        response = http.request('GET', 'http://example.com/', retries=False)
+
+    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+    retries are disabled, in which case the causing exception will be raised.
+
+    :param int total:
+        Total number of retries to allow. Takes precedence over other counts.
+
+        Set to ``None`` to remove this constraint and fall back on other
+        counts.
+
+        Set to ``0`` to fail on the first retry.
+
+        Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+    :param int connect:
+        How many connection-related errors to retry on.
+
+        These are errors raised before the request is sent to the remote server,
+        so we assume the server has not begun processing the request.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+    :param int read:
+        How many times to retry on read errors.
+
+        These errors are raised after the request was sent to the server, so the
+        request may have side-effects.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+    :param int redirect:
+        How many redirects to perform. Limit this to avoid infinite redirect
+        loops.
+
+        A redirect is an HTTP response with a status code of 301, 302, 303,
+        307, or 308.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+        Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+    :param int status:
+        How many times to retry on bad status codes.
+
+        These are retries made on responses, where status code matches
+        ``status_forcelist``.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+    :param int other:
+        How many times to retry on other errors.
+
+        Other errors are errors that are not connect, read, redirect or status errors.
+        These errors might be raised after the request was sent to the server, so the
+        request might have side-effects.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+        If ``total`` is not set, it's a good idea to set this to 0 to account
+        for unexpected edge cases and avoid infinite retry loops.
+
+    :param iterable allowed_methods:
+        Set of uppercased HTTP method verbs that we should retry on.
+
+        By default, we only retry on methods which are considered to be
+        idempotent (multiple requests with the same parameters end with the
+        same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
+
+        Set to a ``False`` value to retry on any verb.
+
+        .. warning::
+
+            Previously this parameter was named ``method_whitelist``; that
+            usage is deprecated in v1.26.0 and will be removed in v2.0.
+
+    :param iterable status_forcelist:
+        A set of integer HTTP status codes that we should force a retry on.
+        A retry is initiated if the request method is in ``allowed_methods``
+        and the response status code is in ``status_forcelist``.
+
+        By default, this is disabled with ``None``.
+
+    :param float backoff_factor:
+        A backoff factor to apply between attempts after the second try
+        (most errors are resolved immediately by a second try without a
+        delay). urllib3 will sleep for::
+
+            {backoff factor} * (2 ** ({number of total retries} - 1))
+
+        seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
+        for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
+        than :attr:`Retry.BACKOFF_MAX`.
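+        With a ``backoff_factor`` of 0.5, for instance, the sleeps would
+        instead be [0.0s, 1.0s, 2.0s, 4.0s, ...].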
+
+        By default, backoff is disabled (set to 0).
+
+    :param bool raise_on_redirect: Whether, if the number of redirects is
+        exhausted, to raise a MaxRetryError, or to return a response with a
+        response code in the 3xx range.
+
+    :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
+        whether we should raise an exception, or return a response,
+        if status falls in ``status_forcelist`` range and retries have
+        been exhausted.
+
+    :param tuple history: The history of the request encountered during
+        each call to :meth:`~Retry.increment`. The list is in the order
+        the requests occurred. Each list item is of class :class:`RequestHistory`.
+
+    :param bool respect_retry_after_header:
+        Whether to respect Retry-After header on status codes defined as
+        :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
+
+    :param iterable remove_headers_on_redirect:
+        Sequence of headers to remove from the request when a response
+        indicating a redirect is returned before firing off the redirected
+        request.
+    """
+
+    #: Default methods to be used for ``allowed_methods``
+    DEFAULT_ALLOWED_METHODS = frozenset(
+        ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
+    )
+
+    #: Default status codes for which to honor a ``Retry-After`` header
+    RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
+
+    #: Default headers to be used for ``remove_headers_on_redirect``
+    DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
+
+    #: Maximum backoff time.
+    BACKOFF_MAX = 120
+
+    def __init__(
+        self,
+        total=10,
+        connect=None,
+        read=None,
+        redirect=None,
+        status=None,
+        other=None,
+        allowed_methods=_Default,
+        status_forcelist=None,
+        backoff_factor=0,
+        raise_on_redirect=True,
+        raise_on_status=True,
+        history=None,
+        respect_retry_after_header=True,
+        remove_headers_on_redirect=_Default,
+        # TODO: Deprecated, remove in v2.0
+        method_whitelist=_Default,
+    ):
+
+        if method_whitelist is not _Default:
+            if allowed_methods is not _Default:
+                raise ValueError(
+                    "Using both 'allowed_methods' and "
+                    "'method_whitelist' together is not allowed. "
+                    "Instead only use 'allowed_methods'"
+                )
+            warnings.warn(
+                "Using 'method_whitelist' with Retry is deprecated and "
+                "will be removed in v2.0. Use 'allowed_methods' instead",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            allowed_methods = method_whitelist
+        if allowed_methods is _Default:
+            allowed_methods = self.DEFAULT_ALLOWED_METHODS
+        if remove_headers_on_redirect is _Default:
+            remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
+        self.total = total
+        self.connect = connect
+        self.read = read
+        self.status = status
+        self.other = other
+
+        if redirect is False or total is False:
+            redirect = 0
+            raise_on_redirect = False
+
+        self.redirect = redirect
+        self.status_forcelist = status_forcelist or set()
+        self.allowed_methods = allowed_methods
+        self.backoff_factor = backoff_factor
+        self.raise_on_redirect = raise_on_redirect
+        self.raise_on_status = raise_on_status
+        self.history = history or tuple()
+        self.respect_retry_after_header = respect_retry_after_header
+        self.remove_headers_on_redirect = frozenset(
+            [h.lower() for h in remove_headers_on_redirect]
+        )
+
+    def new(self, **kw):
+        params = dict(
+            total=self.total,
+            connect=self.connect,
+            read=self.read,
+            redirect=self.redirect,
+            status=self.status,
+            other=self.other,
+            status_forcelist=self.status_forcelist,
+            backoff_factor=self.backoff_factor,
+            raise_on_redirect=self.raise_on_redirect,
+            raise_on_status=self.raise_on_status,
+            history=self.history,
+            remove_headers_on_redirect=self.remove_headers_on_redirect,
+            respect_retry_after_header=self.respect_retry_after_header,
+        )
+
+        # TODO: If already given in **kw we use what's given to us
+        # If not given we need to figure out what to pass. We decide
+        # based on whether our class has the 'method_whitelist' property
+        # and if so we pass the deprecated 'method_whitelist' otherwise
+        # we use 'allowed_methods'. Remove in v2.0
+        if "method_whitelist" not in kw and "allowed_methods" not in kw:
+            if "method_whitelist" in self.__dict__:
+                warnings.warn(
+                    "Using 'method_whitelist' with Retry is deprecated and "
+                    "will be removed in v2.0. Use 'allowed_methods' instead",
+                    DeprecationWarning,
+                )
+                params["method_whitelist"] = self.allowed_methods
+            else:
+                params["allowed_methods"] = self.allowed_methods
+
+        params.update(kw)
+        return type(self)(**params)
+
+    @classmethod
+    def from_int(cls, retries, redirect=True, default=None):
+        """ Backwards-compatibility for the old retries format."""
+        if retries is None:
+            retries = default if default is not None else cls.DEFAULT
+
+        if isinstance(retries, Retry):
+            return retries
+
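+        # redirect=True means "no explicit limit": collapse it to None so the new
+        # Retry falls back on `total`; False stays False and disables redirects.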
+        redirect = bool(redirect) and None
+        new_retries = cls(retries, redirect=redirect)
+        log.debug("Converted retries value: %r -> %r", retries, new_retries)
+        return new_retries
+
+    def get_backoff_time(self):
+        """Formula for computing the current backoff
+
+        :rtype: float
+        """
+        # We want to consider only the last consecutive errors sequence (Ignore redirects).
+        consecutive_errors_len = len(
+            list(
+                takewhile(lambda x: x.redirect_location is None, reversed(self.history))
+            )
+        )
+        if consecutive_errors_len <= 1:
+            return 0
+
+        backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
+        return min(self.BACKOFF_MAX, backoff_value)
+
+    def parse_retry_after(self, retry_after):
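+        # Retry-After may be either a delta in seconds ("120") or an HTTP-date
+        # ("Fri, 31 Dec 1999 23:59:59 GMT"); both forms are handled here.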
+        # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
+        if re.match(r"^\s*[0-9]+\s*$", retry_after):
+            seconds = int(retry_after)
+        else:
+            retry_date_tuple = email.utils.parsedate_tz(retry_after)
+            if retry_date_tuple is None:
+                raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
+            if retry_date_tuple[9] is None:  # Python 2
+                # Assume UTC if no timezone was specified
+                # On Python2.7, parsedate_tz returns None for a timezone offset
+                # instead of 0 if no timezone is given, where mktime_tz treats
+                # a None timezone offset as local time.
+                retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
+
+            retry_date = email.utils.mktime_tz(retry_date_tuple)
+            seconds = retry_date - time.time()
+
+        if seconds < 0:
+            seconds = 0
+
+        return seconds
+
+    def get_retry_after(self, response):
+        """ Get the value of Retry-After in seconds. """
+
+        retry_after = response.getheader("Retry-After")
+
+        if retry_after is None:
+            return None
+
+        return self.parse_retry_after(retry_after)
+
+    def sleep_for_retry(self, response=None):
+        retry_after = self.get_retry_after(response)
+        if retry_after:
+            time.sleep(retry_after)
+            return True
+
+        return False
+
+    def _sleep_backoff(self):
+        backoff = self.get_backoff_time()
+        if backoff <= 0:
+            return
+        time.sleep(backoff)
+
+    def sleep(self, response=None):
+        """Sleep between retry attempts.
+
+        This method will respect a server's ``Retry-After`` response header
+        and sleep the duration of the time requested. If that is not present, it
+        will use an exponential backoff. By default, the backoff factor is 0 and
+        this method will return immediately.
+        """
+
+        if self.respect_retry_after_header and response:
+            slept = self.sleep_for_retry(response)
+            if slept:
+                return
+
+        self._sleep_backoff()
+
+    def _is_connection_error(self, err):
+        """Errors when we're fairly sure that the server did not receive the
+        request, so it should be safe to retry.
+        """
+        if isinstance(err, ProxyError):
+            err = err.original_error
+        return isinstance(err, ConnectTimeoutError)
+
+    def _is_read_error(self, err):
+        """Errors that occur after the request has been started, so we should
+        assume that the server began processing it.
+        """
+        return isinstance(err, (ReadTimeoutError, ProtocolError))
+
+    def _is_method_retryable(self, method):
+        """Checks if a given HTTP method should be retried upon, depending if
+        it is included in the allowed_methods
+        """
+        # TODO: For now favor if the Retry implementation sets its own method_whitelist
+        # property outside of our constructor to avoid breaking custom implementations.
+        if "method_whitelist" in self.__dict__:
+            warnings.warn(
+                "Using 'method_whitelist' with Retry is deprecated and "
+                "will be removed in v2.0. Use 'allowed_methods' instead",
+                DeprecationWarning,
+            )
+            allowed_methods = self.method_whitelist
+        else:
+            allowed_methods = self.allowed_methods
+
+        if allowed_methods and method.upper() not in allowed_methods:
+            return False
+        return True
+
+    def is_retry(self, method, status_code, has_retry_after=False):
+        """Is this method/status code retryable? (Based on allowlists and control
+        variables such as the number of total retries to allow, whether to
+        respect the Retry-After header, whether this header is present, and
+        whether the returned status code is on the list of status codes to
+        be retried upon when the aforementioned header is present)
+        """
+        if not self._is_method_retryable(method):
+            return False
+
+        if self.status_forcelist and status_code in self.status_forcelist:
+            return True
+
+        return (
+            self.total
+            and self.respect_retry_after_header
+            and has_retry_after
+            and (status_code in self.RETRY_AFTER_STATUS_CODES)
+        )
+
+    def is_exhausted(self):
+        """ Are we out of retries? """
+        retry_counts = (
+            self.total,
+            self.connect,
+            self.read,
+            self.redirect,
+            self.status,
+            self.other,
+        )
+        retry_counts = list(filter(None, retry_counts))
+        if not retry_counts:
+            return False
+
+        return min(retry_counts) < 0
+
+    def increment(
+        self,
+        method=None,
+        url=None,
+        response=None,
+        error=None,
+        _pool=None,
+        _stacktrace=None,
+    ):
+        """Return a new Retry object with incremented retry counters.
+
+        :param response: A response object, or None, if the server did not
+            return a response.
+        :type response: :class:`~urllib3.response.HTTPResponse`
+        :param Exception error: An error encountered during the request, or
+            None if the response was received successfully.
+
+        :return: A new ``Retry`` object.
+        """
+        if self.total is False and error:
+            # Disabled, indicate to re-raise the error.
+            raise six.reraise(type(error), error, _stacktrace)
+
+        total = self.total
+        if total is not None:
+            total -= 1
+
+        connect = self.connect
+        read = self.read
+        redirect = self.redirect
+        status_count = self.status
+        other = self.other
+        cause = "unknown"
+        status = None
+        redirect_location = None
+
+        if error and self._is_connection_error(error):
+            # Connect retry?
+            if connect is False:
+                raise six.reraise(type(error), error, _stacktrace)
+            elif connect is not None:
+                connect -= 1
+
+        elif error and self._is_read_error(error):
+            # Read retry?
+            if read is False or not self._is_method_retryable(method):
+                raise six.reraise(type(error), error, _stacktrace)
+            elif read is not None:
+                read -= 1
+
+        elif error:
+            # Other retry?
+            if other is not None:
+                other -= 1
+
+        elif response and response.get_redirect_location():
+            # Redirect retry?
+            if redirect is not None:
+                redirect -= 1
+            cause = "too many redirects"
+            redirect_location = response.get_redirect_location()
+            status = response.status
+
+        else:
+            # Incrementing because of a server error like a 500 in
+            # status_forcelist and the given method is in the allowed_methods
+            cause = ResponseError.GENERIC_ERROR
+            if response and response.status:
+                if status_count is not None:
+                    status_count -= 1
+                cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
+                status = response.status
+
+        history = self.history + (
+            RequestHistory(method, url, error, status, redirect_location),
+        )
+
+        new_retry = self.new(
+            total=total,
+            connect=connect,
+            read=read,
+            redirect=redirect,
+            status=status_count,
+            other=other,
+            history=history,
+        )
+
+        if new_retry.is_exhausted():
+            raise MaxRetryError(_pool, url, error or ResponseError(cause))
+
+        log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
+
+        return new_retry
+
+    def __repr__(self):
+        return (
+            "{cls.__name__}(total={self.total}, connect={self.connect}, "
+            "read={self.read}, redirect={self.redirect}, status={self.status})"
+        ).format(cls=type(self), self=self)
+
+    def __getattr__(self, item):
+        if item == "method_whitelist":
+            # TODO: Remove this deprecated alias in v2.0
+            warnings.warn(
+                "Using 'method_whitelist' with Retry is deprecated and "
+                "will be removed in v2.0. Use 'allowed_methods' instead",
+                DeprecationWarning,
+            )
+            return self.allowed_methods
+        try:
+            return getattr(super(Retry, self), item)
+        except AttributeError:
+            return getattr(Retry, item)
+
+
+# For backwards compatibility (equivalent to pre-v1.9):
+Retry.DEFAULT = Retry(3)
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/ssl_.py b/venv/lib/python3.7/site-packages/urllib3/util/ssl_.py
new file mode 100644
index 00000000..236aa8e6
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/ssl_.py
@@ -0,0 +1,480 @@
+from __future__ import absolute_import
+
+import hmac
+import os
+import sys
+import warnings
+from binascii import hexlify, unhexlify
+from hashlib import md5, sha1, sha256
+
+from ..exceptions import (
+    InsecurePlatformWarning,
+    ProxySchemeUnsupported,
+    SNIMissingWarning,
+    SSLError,
+)
+from ..packages import six
+from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE
+
+SSLContext = None
+SSLTransport = None
+HAS_SNI = False
+IS_PYOPENSSL = False
+IS_SECURETRANSPORT = False
+ALPN_PROTOCOLS = ["http/1.1"]
+
+# Maps the length of a digest to a possible hash function producing this digest
+HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
+
+
+def _const_compare_digest_backport(a, b):
+    """
+    Compare two digests of equal length in constant time.
+
+    The digests must be of type str/bytes.
+    Returns True if the digests match, and False otherwise.
+    """
+    result = abs(len(a) - len(b))
+    for left, right in zip(bytearray(a), bytearray(b)):
+        result |= left ^ right
+    return result == 0
+
+
+_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
+
+try:  # Test for SSL features
+    import ssl
+    from ssl import CERT_REQUIRED, wrap_socket
+except ImportError:
+    pass
+
+try:
+    from ssl import HAS_SNI  # Has SNI?
+except ImportError:
+    pass
+
+try:
+    from .ssltransport import SSLTransport
+except ImportError:
+    pass
+
+
+try:  # Platform-specific: Python 3.6
+    from ssl import PROTOCOL_TLS
+
+    PROTOCOL_SSLv23 = PROTOCOL_TLS
+except ImportError:
+    try:
+        from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
+
+        PROTOCOL_SSLv23 = PROTOCOL_TLS
+    except ImportError:
+        PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
+
+
+try:
+    from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
+except ImportError:
+    OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
+    OP_NO_COMPRESSION = 0x20000
+
+
+try:  # OP_NO_TICKET was added in Python 3.6
+    from ssl import OP_NO_TICKET
+except ImportError:
+    OP_NO_TICKET = 0x4000
+
+
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and
+#   security,
+# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
+# - disable NULL authentication, MD5 MACs, DSS, and other
+#   insecure ciphers for security reasons.
+# - NOTE: TLS 1.3 cipher suites are managed through a different interface
+#   not exposed by CPython (yet!) and are enabled by default if they're available.
+DEFAULT_CIPHERS = ":".join(
+    [
+        "ECDHE+AESGCM",
+        "ECDHE+CHACHA20",
+        "DHE+AESGCM",
+        "DHE+CHACHA20",
+        "ECDH+AESGCM",
+        "DH+AESGCM",
+        "ECDH+AES",
+        "DH+AES",
+        "RSA+AESGCM",
+        "RSA+AES",
+        "!aNULL",
+        "!eNULL",
+        "!MD5",
+        "!DSS",
+    ]
+)
+
+try:
+    from ssl import SSLContext  # Modern SSL?
+except ImportError:
+
+    class SSLContext(object):  # Platform-specific: Python 2
+        def __init__(self, protocol_version):
+            self.protocol = protocol_version
+            # Use default values from a real SSLContext
+            self.check_hostname = False
+            self.verify_mode = ssl.CERT_NONE
+            self.ca_certs = None
+            self.options = 0
+            self.certfile = None
+            self.keyfile = None
+            self.ciphers = None
+
+        def load_cert_chain(self, certfile, keyfile):
+            self.certfile = certfile
+            self.keyfile = keyfile
+
+        def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+            self.ca_certs = cafile
+
+            if capath is not None:
+                raise SSLError("CA directories not supported in older Pythons")
+
+            if cadata is not None:
+                raise SSLError("CA data not supported in older Pythons")
+
+        def set_ciphers(self, cipher_suite):
+            self.ciphers = cipher_suite
+
+        def wrap_socket(self, socket, server_hostname=None, server_side=False):
+            warnings.warn(
+                "A true SSLContext object is not available. This prevents "
+                "urllib3 from configuring SSL appropriately and may cause "
+                "certain SSL connections to fail. You can upgrade to a newer "
+                "version of Python to solve this. For more information, see "
+                "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+                "#ssl-warnings",
+                InsecurePlatformWarning,
+            )
+            kwargs = {
+                "keyfile": self.keyfile,
+                "certfile": self.certfile,
+                "ca_certs": self.ca_certs,
+                "cert_reqs": self.verify_mode,
+                "ssl_version": self.protocol,
+                "server_side": server_side,
+            }
+            return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
+
+
+def assert_fingerprint(cert, fingerprint):
+    """
+    Checks if given fingerprint matches the supplied certificate.
+
+    :param cert:
+        Certificate as bytes object.
+    :param fingerprint:
+        Fingerprint as string of hexdigits, can be interspersed by colons.
+    """
+
+    fingerprint = fingerprint.replace(":", "").lower()
+    digest_length = len(fingerprint)
+    hashfunc = HASHFUNC_MAP.get(digest_length)
+    if not hashfunc:
+        raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint))
+
+    # We need encode() here for py32; works on py2 and py3.
+    fingerprint_bytes = unhexlify(fingerprint.encode())
+
+    cert_digest = hashfunc(cert).digest()
+
+    if not _const_compare_digest(cert_digest, fingerprint_bytes):
+        raise SSLError(
+            'Fingerprints did not match. Expected "{0}", got "{1}".'.format(
+                fingerprint, hexlify(cert_digest)
+            )
+        )
+
+
+def resolve_cert_reqs(candidate):
+    """
+    Resolves the argument to a numeric constant, which can be passed to
+    the wrap_socket function/method from the ssl module.
+    Defaults to :data:`ssl.CERT_REQUIRED`.
+    If given a string it is assumed to be the name of the constant in the
+    :mod:`ssl` module or its abbreviation.
+    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
+    If it's neither `None` nor a string we assume it is already the numeric
+    constant which can directly be passed to wrap_socket.
+    """
+    if candidate is None:
+        return CERT_REQUIRED
+
+    if isinstance(candidate, str):
+        res = getattr(ssl, candidate, None)
+        if res is None:
+            res = getattr(ssl, "CERT_" + candidate)
+        return res
+
+    return candidate
+
+
+def resolve_ssl_version(candidate):
+    """
+    Resolves the argument to an ssl protocol constant, analogous to
+    resolve_cert_reqs. Defaults to :data:`PROTOCOL_TLS`; a string is looked up
+    in the :mod:`ssl` module, optionally prefixed with ``PROTOCOL_``.
+    """
+    if candidate is None:
+        return PROTOCOL_TLS
+
+    if isinstance(candidate, str):
+        res = getattr(ssl, candidate, None)
+        if res is None:
+            res = getattr(ssl, "PROTOCOL_" + candidate)
+        return res
+
+    return candidate
+
+
+def create_urllib3_context(
+    ssl_version=None, cert_reqs=None, options=None, ciphers=None
+):
+    """All arguments have the same meaning as ``ssl_wrap_socket``.
+
+    By default, this function does a lot of the same work that
+    ``ssl.create_default_context`` does on Python 3.4+. It:
+
+    - Disables SSLv2, SSLv3, and compression
+    - Sets a restricted set of server ciphers
+
+    If you wish to enable SSLv3, you can do::
+
+        from urllib3.util import ssl_
+        context = ssl_.create_urllib3_context()
+        context.options &= ~ssl_.OP_NO_SSLv3
+
+    You can do the same to enable compression (substituting ``COMPRESSION``
+    for ``SSLv3`` in the last line above).
+
+    :param ssl_version:
+        The desired protocol version to use. This will default to
+        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
+        the server and your installation of OpenSSL support.
+    :param cert_reqs:
+        Whether to require the certificate verification. This defaults to
+        ``ssl.CERT_REQUIRED``.
+    :param options:
+        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
+    :param ciphers:
+        Which cipher suites to allow the server to select.
+    :returns:
+        Constructed SSLContext object with specified options
+    :rtype: SSLContext
+    """
+    context = SSLContext(ssl_version or PROTOCOL_TLS)
+
+    context.set_ciphers(ciphers or DEFAULT_CIPHERS)
+
+    # Setting the default here, as we may have no ssl module on import
+    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+
+    if options is None:
+        options = 0
+        # SSLv2 is easily broken and is considered harmful and dangerous
+        options |= OP_NO_SSLv2
+        # SSLv3 has several problems and is now dangerous
+        options |= OP_NO_SSLv3
+        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
+        # (issue #309)
+        options |= OP_NO_COMPRESSION
+        # TLSv1.2 only. Unless set explicitly, do not request session tickets.
+        # This saves some bandwidth on the wire, and although the ticket is
+        # encrypted, leaving it on the wire is a risk if the server is not
+        # rotating its ticketing keys properly.
+        options |= OP_NO_TICKET
+
+    context.options |= options
+
+    # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
+    # necessary for conditional client cert authentication with TLS 1.3.
+    # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
+    # versions of Python.  We only enable on Python 3.7.4+ or if certificate
+    # verification is enabled to work around Python issue #37428
+    # See: https://bugs.python.org/issue37428
+    if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr(
+        context, "post_handshake_auth", None
+    ) is not None:
+        context.post_handshake_auth = True
+
+    context.verify_mode = cert_reqs
+    if (
+        getattr(context, "check_hostname", None) is not None
+    ):  # Platform-specific: Python 3.2
+        # We do our own verification, including fingerprints and alternative
+        # hostnames. So disable it here
+        context.check_hostname = False
+
+    # Enable logging of TLS session keys via defacto standard environment variable
+    # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
+    if hasattr(context, "keylog_filename"):
+        sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
+        if sslkeylogfile:
+            context.keylog_filename = sslkeylogfile
+
+    return context
+
+
+def ssl_wrap_socket(
+    sock,
+    keyfile=None,
+    certfile=None,
+    cert_reqs=None,
+    ca_certs=None,
+    server_hostname=None,
+    ssl_version=None,
+    ciphers=None,
+    ssl_context=None,
+    ca_cert_dir=None,
+    key_password=None,
+    ca_cert_data=None,
+    tls_in_tls=False,
+):
+    """
+    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
+    the same meaning as they do when using :func:`ssl.wrap_socket`.
+
+    :param server_hostname:
+        When SNI is supported, the expected hostname of the certificate
+    :param ssl_context:
+        A pre-made :class:`SSLContext` object. If none is provided, one will
+        be created using :func:`create_urllib3_context`.
+    :param ciphers:
+        A string of ciphers we wish the client to support.
+    :param ca_cert_dir:
+        A directory containing CA certificates in multiple separate files, as
+        supported by OpenSSL's -CApath flag or the capath argument to
+        SSLContext.load_verify_locations().
+    :param key_password:
+        Optional password if the keyfile is encrypted.
+    :param ca_cert_data:
+        Optional string containing CA certificates in PEM format suitable for
+        passing as the cadata parameter to SSLContext.load_verify_locations()
+    :param tls_in_tls:
+        Use SSLTransport to wrap the existing socket.
+    """
+    context = ssl_context
+    if context is None:
+        # Note: This branch of code and all the variables in it are no longer
+        # used by urllib3 itself. We should consider deprecating and removing
+        # this code.
+        context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
+
+    if ca_certs or ca_cert_dir or ca_cert_data:
+        try:
+            context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
+        except (IOError, OSError) as e:
+            raise SSLError(e)
+
+    elif ssl_context is None and hasattr(context, "load_default_certs"):
+        # Try to load OS default certs; works well on Windows (requires Python 3.4+).
+        context.load_default_certs()
+
+    # Attempt to detect if we get the goofy behavior of the
+    # keyfile being encrypted and OpenSSL asking for the
+    # passphrase via the terminal and instead error out.
+    if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
+        raise SSLError("Client private key is encrypted, password is required")
+
+    if certfile:
+        if key_password is None:
+            context.load_cert_chain(certfile, keyfile)
+        else:
+            context.load_cert_chain(certfile, keyfile, key_password)
+
+    try:
+        if hasattr(context, "set_alpn_protocols"):
+            context.set_alpn_protocols(ALPN_PROTOCOLS)
+    except NotImplementedError:
+        pass
+
+    # If we detect server_hostname is an IP address then the SNI
+    # extension should not be used according to RFC3546 Section 3.1
+    use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
+    # SecureTransport uses server_hostname in certificate verification.
+    send_sni = (use_sni_hostname and HAS_SNI) or (
+        IS_SECURETRANSPORT and server_hostname
+    )
+    # Do not warn the user if server_hostname is an invalid SNI hostname.
+    if not HAS_SNI and use_sni_hostname:
+        warnings.warn(
+            "An HTTPS request has been made, but the SNI (Server Name "
+            "Indication) extension to TLS is not available on this platform. "
+            "This may cause the server to present an incorrect TLS "
+            "certificate, which can cause validation failures. You can upgrade to "
+            "a newer version of Python to solve this. For more information, see "
+            "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+            "#ssl-warnings",
+            SNIMissingWarning,
+        )
+
+    if send_sni:
+        ssl_sock = _ssl_wrap_socket_impl(
+            sock, context, tls_in_tls, server_hostname=server_hostname
+        )
+    else:
+        ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
+    return ssl_sock
+
+
+def is_ipaddress(hostname):
+    """Detects whether the hostname given is an IPv4 or IPv6 address.
+    Also detects IPv6 addresses with Zone IDs.
+
+    :param str hostname: Hostname to examine.
+    :return: True if the hostname is an IP address, False otherwise.
+    """
+    if not six.PY2 and isinstance(hostname, bytes):
+        # IDN A-label bytes are ASCII compatible.
+        hostname = hostname.decode("ascii")
+    return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname))
+
+
+def _is_key_file_encrypted(key_file):
+    """Detects if a key file is encrypted or not."""
+    with open(key_file, "r") as f:
+        for line in f:
+            # Look for Proc-Type: 4,ENCRYPTED
+            if "ENCRYPTED" in line:
+                return True
+
+    return False
+
+
+def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
+    if tls_in_tls:
+        if not SSLTransport:
+            # Import error, ssl is not available.
+            raise ProxySchemeUnsupported(
+                "TLS in TLS requires support for the 'ssl' module"
+            )
+
+        SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
+        return SSLTransport(sock, ssl_context, server_hostname)
+
+    if server_hostname:
+        return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
+    else:
+        return ssl_context.wrap_socket(sock)
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/ssltransport.py b/venv/lib/python3.7/site-packages/urllib3/util/ssltransport.py
new file mode 100644
index 00000000..1e41354f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/ssltransport.py
@@ -0,0 +1,226 @@
+import io
+import socket
+import ssl
+
+from urllib3.exceptions import ProxySchemeUnsupported
+from urllib3.packages import six
+
+SSL_BLOCKSIZE = 16384
+
+
+class SSLTransport:
+    """
+    The SSLTransport wraps an existing socket and establishes an SSL connection.
+
+    Contrary to Python's implementation of SSLSocket, it allows you to chain
+    multiple TLS connections together. It's particularly useful if you need to
+    implement TLS within TLS.
+
+    The class supports most of the socket API operations.
+    """
+
+    @staticmethod
+    def _validate_ssl_context_for_tls_in_tls(ssl_context):
+        """
+        Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
+        for TLS in TLS.
+
+        The only requirement is that the ssl_context provides the 'wrap_bio'
+        method.
+        """
+
+        if not hasattr(ssl_context, "wrap_bio"):
+            if six.PY2:
+                raise ProxySchemeUnsupported(
+                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+                    "supported on Python 2"
+                )
+            else:
+                raise ProxySchemeUnsupported(
+                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+                    "available on non-native SSLContext"
+                )
+
+    def __init__(
+        self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
+    ):
+        """
+        Create an SSLTransport around socket using the provided ssl_context.
+        """
+        self.incoming = ssl.MemoryBIO()
+        self.outgoing = ssl.MemoryBIO()
+
+        self.suppress_ragged_eofs = suppress_ragged_eofs
+        self.socket = socket
+
+        self.sslobj = ssl_context.wrap_bio(
+            self.incoming, self.outgoing, server_hostname=server_hostname
+        )
+
+        # Perform initial handshake.
+        self._ssl_io_loop(self.sslobj.do_handshake)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *_):
+        self.close()
+
+    def fileno(self):
+        return self.socket.fileno()
+
+    def read(self, len=1024, buffer=None):
+        return self._wrap_ssl_read(len, buffer)
+
+    def recv(self, len=1024, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to recv")
+        return self._wrap_ssl_read(len)
+
+    def recv_into(self, buffer, nbytes=None, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to recv_into")
+        if buffer and (nbytes is None):
+            nbytes = len(buffer)
+        elif nbytes is None:
+            nbytes = 1024
+        return self.read(nbytes, buffer)
+
+    def sendall(self, data, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to sendall")
+        count = 0
+        with memoryview(data) as view, view.cast("B") as byte_view:
+            amount = len(byte_view)
+            while count < amount:
+                v = self.send(byte_view[count:])
+                count += v
+
+    def send(self, data, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to send")
+        response = self._ssl_io_loop(self.sslobj.write, data)
+        return response
+
+    def makefile(
+        self, mode="r", buffering=None, encoding=None, errors=None, newline=None
+    ):
+        """
+        Python's http.client uses makefile and buffered io when reading HTTP
+        messages and we need to support it.
+
+        This is unfortunately a copy and paste of socket.py makefile with small
+        changes to point to the socket directly.
+        """
+        if not set(mode) <= {"r", "w", "b"}:
+            raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
+
+        writing = "w" in mode
+        reading = "r" in mode or not writing
+        assert reading or writing
+        binary = "b" in mode
+        rawmode = ""
+        if reading:
+            rawmode += "r"
+        if writing:
+            rawmode += "w"
+        raw = socket.SocketIO(self, rawmode)
+        self.socket._io_refs += 1
+        if buffering is None:
+            buffering = -1
+        if buffering < 0:
+            buffering = io.DEFAULT_BUFFER_SIZE
+        if buffering == 0:
+            if not binary:
+                raise ValueError("unbuffered streams must be binary")
+            return raw
+        if reading and writing:
+            buffer = io.BufferedRWPair(raw, raw, buffering)
+        elif reading:
+            buffer = io.BufferedReader(raw, buffering)
+        else:
+            assert writing
+            buffer = io.BufferedWriter(raw, buffering)
+        if binary:
+            return buffer
+        text = io.TextIOWrapper(buffer, encoding, errors, newline)
+        text.mode = mode
+        return text
+
+    def unwrap(self):
+        self._ssl_io_loop(self.sslobj.unwrap)
+
+    def close(self):
+        self.socket.close()
+
+    def getpeercert(self, binary_form=False):
+        return self.sslobj.getpeercert(binary_form)
+
+    def version(self):
+        return self.sslobj.version()
+
+    def cipher(self):
+        return self.sslobj.cipher()
+
+    def selected_alpn_protocol(self):
+        return self.sslobj.selected_alpn_protocol()
+
+    def selected_npn_protocol(self):
+        return self.sslobj.selected_npn_protocol()
+
+    def shared_ciphers(self):
+        return self.sslobj.shared_ciphers()
+
+    def compression(self):
+        return self.sslobj.compression()
+
+    def settimeout(self, value):
+        self.socket.settimeout(value)
+
+    def gettimeout(self):
+        return self.socket.gettimeout()
+
+    def _decref_socketios(self):
+        self.socket._decref_socketios()
+
+    def _wrap_ssl_read(self, len, buffer=None):
+        try:
+            return self._ssl_io_loop(self.sslobj.read, len, buffer)
+        except ssl.SSLError as e:
+            if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
+                return 0  # eof, return 0.
+            else:
+                raise
+
+    def _ssl_io_loop(self, func, *args):
+        """ Performs an I/O loop between incoming/outgoing and the socket."""
+        should_loop = True
+        ret = None
+
+        while should_loop:
+            errno = None
+            try:
+                ret = func(*args)
+            except ssl.SSLError as e:
+                if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
+                    # WANT_READ and WANT_WRITE are expected; others are not.
+                    raise e
+                errno = e.errno
+
+            buf = self.outgoing.read()
+            self.socket.sendall(buf)
+
+            if errno is None:
+                should_loop = False
+            elif errno == ssl.SSL_ERROR_WANT_READ:
+                buf = self.socket.recv(SSL_BLOCKSIZE)
+                if buf:
+                    self.incoming.write(buf)
+                else:
+                    self.incoming.write_eof()
+        return ret
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/timeout.py b/venv/lib/python3.7/site-packages/urllib3/util/timeout.py
new file mode 100644
index 00000000..ff69593b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/timeout.py
@@ -0,0 +1,270 @@
+from __future__ import absolute_import
+
+import time
+
+# The default socket timeout, used by httplib to indicate that no timeout was
+# specified by the user
+from socket import _GLOBAL_DEFAULT_TIMEOUT
+
+from ..exceptions import TimeoutStateError
+
+# A sentinel value to indicate that no timeout was specified by the user in
+# urllib3
+_Default = object()
+
+
+# Use time.monotonic if available.
+current_time = getattr(time, "monotonic", time.time)
+
+
+class Timeout(object):
+    """Timeout configuration.
+
+    Timeouts can be defined as a default for a pool:
+
+    .. code-block:: python
+
+       timeout = Timeout(connect=2.0, read=7.0)
+       http = PoolManager(timeout=timeout)
+       response = http.request('GET', 'http://example.com/')
+
+    Or per-request (which overrides the default for the pool):
+
+    .. code-block:: python
+
+       response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+
+    Timeouts can be disabled by setting all the parameters to ``None``:
+
+    .. code-block:: python
+
+       no_timeout = Timeout(connect=None, read=None)
+       response = http.request('GET', 'http://example.com/', timeout=no_timeout)
+
+
+    :param total:
+        This combines the connect and read timeouts into one; the read timeout
+        will be set to the time leftover from the connect attempt. In the
+        event that both a connect timeout and a total are specified, or a read
+        timeout and a total are specified, the shorter timeout will be applied.
+
+        Defaults to None.
+
+    :type total: int, float, or None
+
+    :param connect:
+        The maximum amount of time (in seconds) to wait for a connection
+        attempt to a server to succeed. Omitting the parameter will default the
+        connect timeout to the system default, probably `the global default
+        timeout in socket.py
+        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+        None will set an infinite timeout for connection attempts.
+
+    :type connect: int, float, or None
+
+    :param read:
+        The maximum amount of time (in seconds) to wait between consecutive
+        read operations for a response from the server. Omitting the parameter
+        will default the read timeout to the system default, probably `the
+        global default timeout in socket.py
+        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+        None will set an infinite timeout.
+
+    :type read: int, float, or None
+
+    .. note::
+
+        Many factors can affect the total amount of time for urllib3 to return
+        an HTTP response.
+
+        For example, Python's DNS resolver does not obey the timeout specified
+        on the socket. Other factors that can affect total request time include
+        high CPU load, high swap, the program running at a low priority level,
+        or other behaviors.
+
+        In addition, the read and total timeouts only measure the time between
+        read operations on the socket connecting the client and the server,
+        not the total amount of time for the request to return a complete
+        response. For most requests, the timeout is raised because the server
+        has not sent the first byte in the specified time. This is not always
+        the case; if a server streams one byte every fifteen seconds, a timeout
+        of 20 seconds will not trigger, even though the request will take
+        several minutes to complete.
+
+        If your goal is to cut off any request after a set amount of wall clock
+        time, consider having a second "watcher" thread to cut off a slow
+        request.
+    """
+
+    #: A sentinel object representing the default timeout value
+    DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
+
+    def __init__(self, total=None, connect=_Default, read=_Default):
+        self._connect = self._validate_timeout(connect, "connect")
+        self._read = self._validate_timeout(read, "read")
+        self.total = self._validate_timeout(total, "total")
+        self._start_connect = None
+
+    def __repr__(self):
+        return "%s(connect=%r, read=%r, total=%r)" % (
+            type(self).__name__,
+            self._connect,
+            self._read,
+            self.total,
+        )
+
+    # __str__ provided for backwards compatibility
+    __str__ = __repr__
+
+    @classmethod
+    def _validate_timeout(cls, value, name):
+        """Check that a timeout attribute is valid.
+
+        :param value: The timeout value to validate
+        :param name: The name of the timeout attribute to validate. This is
+            used to specify in error messages.
+        :return: The validated and casted version of the given value.
+        :raises ValueError: If it is a numeric value less than or equal to
+            zero, or the type is not an integer, float, or None.
+        """
+        if value is _Default:
+            return cls.DEFAULT_TIMEOUT
+
+        if value is None or value is cls.DEFAULT_TIMEOUT:
+            return value
+
+        if isinstance(value, bool):
+            raise ValueError(
+                "Timeout cannot be a boolean value. It must "
+                "be an int, float or None."
+            )
+        try:
+            float(value)
+        except (TypeError, ValueError):
+            raise ValueError(
+                "Timeout value %s was %s, but it must be an "
+                "int, float or None." % (name, value)
+            )
+
+        try:
+            if value <= 0:
+                raise ValueError(
+                    "Attempted to set %s timeout to %s, but the "
+                    "timeout cannot be set to a value less "
+                    "than or equal to 0." % (name, value)
+                )
+        except TypeError:
+            # Python 3: comparing an unorderable type against 0 raises TypeError.
+            raise ValueError(
+                "Timeout value %s was %s, but it must be an "
+                "int, float or None." % (name, value)
+            )
+
+        return value
+
+    @classmethod
+    def from_float(cls, timeout):
+        """Create a new Timeout from a legacy timeout value.
+
+        The timeout value used by httplib.py sets the same timeout on the
+        connect() and recv() socket requests. This creates a :class:`Timeout`
+        object that sets the individual timeouts to the ``timeout`` value
+        passed to this function.
+
+        :param timeout: The legacy timeout value.
+        :type timeout: integer, float, sentinel default object, or None
+        :return: Timeout object
+        :rtype: :class:`Timeout`
+        """
+        return Timeout(read=timeout, connect=timeout)
+
+    def clone(self):
+        """Create a copy of the timeout object
+
+        Timeout properties are stored per-pool but each request needs a fresh
+        Timeout object to ensure each one has its own start/stop configured.
+
+        :return: a copy of the timeout object
+        :rtype: :class:`Timeout`
+        """
+        # We can't use copy.deepcopy because that will also create a new object
+        # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
+        # detect the user default.
+        return Timeout(connect=self._connect, read=self._read, total=self.total)
+
+    def start_connect(self):
+        """Start the timeout clock, used during a connect() attempt
+
+        :raises urllib3.exceptions.TimeoutStateError: if you attempt
+            to start a timer that has been started already.
+        """
+        if self._start_connect is not None:
+            raise TimeoutStateError("Timeout timer has already been started.")
+        self._start_connect = current_time()
+        return self._start_connect
+
+    def get_connect_duration(self):
+        """Gets the time elapsed since the call to :meth:`start_connect`.
+
+        :return: Elapsed time in seconds.
+        :rtype: float
+        :raises urllib3.exceptions.TimeoutStateError: if you attempt
+            to get duration for a timer that hasn't been started.
+        """
+        if self._start_connect is None:
+            raise TimeoutStateError(
+                "Can't get connect duration for timer that has not started."
+            )
+        return current_time() - self._start_connect
+
+    @property
+    def connect_timeout(self):
+        """Get the value to use when setting a connection timeout.
+
+        This will be a positive float or integer, the value None
+        (never timeout), or the default system timeout.
+
+        :return: Connect timeout.
+        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+        """
+        if self.total is None:
+            return self._connect
+
+        if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
+            return self.total
+
+        return min(self._connect, self.total)
+
+    @property
+    def read_timeout(self):
+        """Get the value for the read timeout.
+
+        This assumes some time has elapsed in the connection timeout and
+        computes the read timeout appropriately.
+
+        If self.total is set, the read timeout is dependent on the amount of
+        time taken by the connect timeout. If the connection time has not been
+        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
+        raised.
+
+        :return: Value to use for the read timeout.
+        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
+            has not yet been called on this object.
+        """
+        if (
+            self.total is not None
+            and self.total is not self.DEFAULT_TIMEOUT
+            and self._read is not None
+            and self._read is not self.DEFAULT_TIMEOUT
+        ):
+            # In case the connect timeout has not yet been established.
+            if self._start_connect is None:
+                return self._read
+            return max(0, min(self.total - self.get_connect_duration(), self._read))
+        elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
+            return max(0, self.total - self.get_connect_duration())
+        else:
+            return self._read
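Taken together, the Timeout properties above interact like this (a minimal
sketch; the import path is real, the numbers are illustrative):

    from urllib3.util.timeout import Timeout

    # Separate connect/read budgets, both capped by `total`.
    t = Timeout(connect=2.0, read=7.0, total=5.0)
    t = t.clone()              # each request should get a fresh copy
    t.start_connect()          # start the clock before connect()
    print(t.connect_timeout)   # min(connect, total) -> 2.0
    print(t.read_timeout)      # max(0, min(total - elapsed, read)) -> ~5.0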
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/url.py b/venv/lib/python3.7/site-packages/urllib3/util/url.py
new file mode 100644
index 00000000..6ff238fe
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/url.py
@@ -0,0 +1,430 @@
+from __future__ import absolute_import
+
+import re
+from collections import namedtuple
+
+from ..exceptions import LocationParseError
+from ..packages import six
+
+url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
+
+# We only want to normalize urls with an HTTP(S) scheme.
+# urllib3 infers URLs without a scheme (None) to be http.
+NORMALIZABLE_SCHEMES = ("http", "https", None)
+
+# Almost all of these patterns were derived from the
+# 'rfc3986' module: https://github.com/python-hyper/rfc3986
+PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
+SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)")
+URI_RE = re.compile(
+    r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?"
+    r"(?://([^\\/?#]*))?"
+    r"([^?#]*)"
+    r"(?:\?([^#]*))?"
+    r"(?:#(.*))?$",
+    re.UNICODE | re.DOTALL,
+)
+
+IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
+HEX_PAT = "[0-9A-Fa-f]{1,4}"
+LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT)
+_subs = {"hex": HEX_PAT, "ls32": LS32_PAT}
+_variations = [
+    #                            6( h16 ":" ) ls32
+    "(?:%(hex)s:){6}%(ls32)s",
+    #                       "::" 5( h16 ":" ) ls32
+    "::(?:%(hex)s:){5}%(ls32)s",
+    # [               h16 ] "::" 4( h16 ":" ) ls32
+    "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
+    # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
+    "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
+    # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
+    "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
+    # [ *3( h16 ":" ) h16 ] "::"    h16 ":"   ls32
+    "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
+    # [ *4( h16 ":" ) h16 ] "::"              ls32
+    "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
+    # [ *5( h16 ":" ) h16 ] "::"              h16
+    "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
+    # [ *6( h16 ":" ) h16 ] "::"
+    "(?:(?:%(hex)s:){0,6}%(hex)s)?::",
+]
+
+UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
+IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
+ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
+IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
+REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
+TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
+
+IPV4_RE = re.compile("^" + IPV4_PAT + "$")
+IPV6_RE = re.compile("^" + IPV6_PAT + "$")
+IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
+BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
+ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
+
+SUBAUTHORITY_PAT = (u"^(?:(.*)@)?(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
+    REG_NAME_PAT,
+    IPV4_PAT,
+    IPV6_ADDRZ_PAT,
+)
+SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL)
+
+UNRESERVED_CHARS = set(
+    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
+)
+SUB_DELIM_CHARS = set("!$&'()*+,;=")
+USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"}
+PATH_CHARS = USERINFO_CHARS | {"@", "/"}
+QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"}
+
+
+class Url(namedtuple("Url", url_attrs)):
+    """
+    Data structure for representing an HTTP URL. Used as a return value for
+    :func:`parse_url`. Both the scheme and host are normalized, as they are
+    case-insensitive according to RFC 3986.
+    """
+
+    __slots__ = ()
+
+    def __new__(
+        cls,
+        scheme=None,
+        auth=None,
+        host=None,
+        port=None,
+        path=None,
+        query=None,
+        fragment=None,
+    ):
+        if path and not path.startswith("/"):
+            path = "/" + path
+        if scheme is not None:
+            scheme = scheme.lower()
+        return super(Url, cls).__new__(
+            cls, scheme, auth, host, port, path, query, fragment
+        )
+
+    @property
+    def hostname(self):
+        """For backwards-compatibility with urlparse. We're nice like that."""
+        return self.host
+
+    @property
+    def request_uri(self):
+        """Absolute path including the query string."""
+        uri = self.path or "/"
+
+        if self.query is not None:
+            uri += "?" + self.query
+
+        return uri
+
+    @property
+    def netloc(self):
+        """Network location including host and port"""
+        if self.port:
+            return "%s:%d" % (self.host, self.port)
+        return self.host
+
+    @property
+    def url(self):
+        """
+        Convert self into a url
+
+        This function should more or less round-trip with :func:`.parse_url`. The
+        returned url may not be exactly the same as the url inputted to
+        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
+        with a blank port will have : removed).
+
+        Example: ::
+
+            >>> U = parse_url('http://google.com/mail/')
+            >>> U.url
+            'http://google.com/mail/'
+            >>> Url('http', 'username:password', 'host.com', 80,
+            ... '/path', 'query', 'fragment').url
+            'http://username:password@host.com:80/path?query#fragment'
+        """
+        scheme, auth, host, port, path, query, fragment = self
+        url = u""
+
+        # We use "is not None" we want things to happen with empty strings (or 0 port)
+        if scheme is not None:
+            url += scheme + u"://"
+        if auth is not None:
+            url += auth + u"@"
+        if host is not None:
+            url += host
+        if port is not None:
+            url += u":" + str(port)
+        if path is not None:
+            url += path
+        if query is not None:
+            url += u"?" + query
+        if fragment is not None:
+            url += u"#" + fragment
+
+        return url
+
+    def __str__(self):
+        return self.url
+
+
+def split_first(s, delims):
+    """
+    .. deprecated:: 1.25
+
+    Given a string and an iterable of delimiters, split on the first found
+    delimiter. Return two split parts and the matched delimiter.
+
+    If not found, then the first part is the full input string.
+
+    Example::
+
+        >>> split_first('foo/bar?baz', '?/=')
+        ('foo', 'bar?baz', '/')
+        >>> split_first('foo/bar?baz', '123')
+        ('foo/bar?baz', '', None)
+
+    Scales linearly with the number of delims; not ideal for a large number of them.
+    """
+    min_idx = None
+    min_delim = None
+    for d in delims:
+        idx = s.find(d)
+        if idx < 0:
+            continue
+
+        if min_idx is None or idx < min_idx:
+            min_idx = idx
+            min_delim = d
+
+    if min_idx is None or min_idx < 0:
+        return s, "", None
+
+    return s[:min_idx], s[min_idx + 1 :], min_delim
+
+
+def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"):
+    """Percent-encodes a URI component without reapplying
+    onto an already percent-encoded component.
+    """
+    if component is None:
+        return component
+
+    component = six.ensure_text(component)
+
+    # Normalize existing percent-encoded bytes.
+    # Try to see if the component we're encoding is already percent-encoded
+    # so we can skip all '%' characters but still encode all others.
+    component, percent_encodings = PERCENT_RE.subn(
+        lambda match: match.group(0).upper(), component
+    )
+
+    uri_bytes = component.encode("utf-8", "surrogatepass")
+    is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
+    encoded_component = bytearray()
+
+    for i in range(0, len(uri_bytes)):
+        # Will return a single character bytestring on both Python 2 & 3
+        byte = uri_bytes[i : i + 1]
+        byte_ord = ord(byte)
+        if (is_percent_encoded and byte == b"%") or (
+            byte_ord < 128 and byte.decode() in allowed_chars
+        ):
+            encoded_component += byte
+            continue
+        encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
+
+    return encoded_component.decode(encoding)
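To illustrate the "don't double-encode" behaviour (a sketch calling the
private helper directly; its real callers are parse_url() and
_encode_target()):

    print(_encode_invalid_chars("/a path", PATH_CHARS))    # '/a%20path'
    print(_encode_invalid_chars("/a%20path", PATH_CHARS))  # unchanged: already encoded
    print(_encode_invalid_chars("/a%2xpath", PATH_CHARS))  # stray '%' is escaped: '/a%252xpath'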
+
+
+def _remove_path_dot_segments(path):
+    # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
+    segments = path.split("/")  # Turn the path into a list of segments
+    output = []  # Initialize the variable to use to store output
+
+    for segment in segments:
+        # '.' is the current directory, so ignore it, it is superfluous
+        if segment == ".":
+            continue
+        # Anything other than '..', should be appended to the output
+        elif segment != "..":
+            output.append(segment)
+        # In this case segment == '..', if we can, we should pop the last
+        # element
+        elif output:
+            output.pop()
+
+    # If the path starts with '/' and the output is empty or the first string
+    # is non-empty
+    if path.startswith("/") and (not output or output[0]):
+        output.insert(0, "")
+
+    # If the path starts with '/.' or '/..' ensure we add one more empty
+    # string to add a trailing '/'
+    if path.endswith(("/.", "/..")):
+        output.append("")
+
+    return "/".join(output)
+
+
+def _normalize_host(host, scheme):
+    if host:
+        if isinstance(host, six.binary_type):
+            host = six.ensure_str(host)
+
+        if scheme in NORMALIZABLE_SCHEMES:
+            is_ipv6 = IPV6_ADDRZ_RE.match(host)
+            if is_ipv6:
+                match = ZONE_ID_RE.search(host)
+                if match:
+                    start, end = match.span(1)
+                    zone_id = host[start:end]
+
+                    if zone_id.startswith("%25") and zone_id != "%25":
+                        zone_id = zone_id[3:]
+                    else:
+                        zone_id = zone_id[1:]
+                    zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS)
+                    return host[:start].lower() + zone_id + host[end:]
+                else:
+                    return host.lower()
+            elif not IPV4_RE.match(host):
+                return six.ensure_str(
+                    b".".join([_idna_encode(label) for label in host.split(".")])
+                )
+    return host
+
+
+def _idna_encode(name):
+    if name and any([ord(x) > 128 for x in name]):
+        try:
+            import idna
+        except ImportError:
+            six.raise_from(
+                LocationParseError("Unable to parse URL without the 'idna' module"),
+                None,
+            )
+        try:
+            return idna.encode(name.lower(), strict=True, std3_rules=True)
+        except idna.IDNAError:
+            six.raise_from(
+                LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None
+            )
+    return name.lower().encode("ascii")
+
+
+def _encode_target(target):
+    """Percent-encodes a request target so that there are no invalid characters"""
+    path, query = TARGET_RE.match(target).groups()
+    target = _encode_invalid_chars(path, PATH_CHARS)
+    query = _encode_invalid_chars(query, QUERY_CHARS)
+    if query is not None:
+        target += "?" + query
+    return target
+
+
+def parse_url(url):
+    """
+    Given a url, return a parsed :class:`.Url` namedtuple. A best-effort
+    attempt is made to parse incomplete urls. Fields not provided will be None.
+    This parser is RFC 3986 compliant.
+
+    The parser logic and helper functions are based heavily on
+    work done in the ``rfc3986`` module.
+
+    :param str url: URL to parse into a :class:`.Url` namedtuple.
+
+    Partly backwards-compatible with :mod:`urlparse`.
+
+    Example::
+
+        >>> parse_url('http://google.com/mail/')
+        Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
+        >>> parse_url('google.com:80')
+        Url(scheme=None, host='google.com', port=80, path=None, ...)
+        >>> parse_url('/foo?bar')
+        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
+    """
+    if not url:
+        # Empty
+        return Url()
+
+    source_url = url
+    if not SCHEME_RE.search(url):
+        url = "//" + url
+
+    try:
+        scheme, authority, path, query, fragment = URI_RE.match(url).groups()
+        normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES
+
+        if scheme:
+            scheme = scheme.lower()
+
+        if authority:
+            auth, host, port = SUBAUTHORITY_RE.match(authority).groups()
+            if auth and normalize_uri:
+                auth = _encode_invalid_chars(auth, USERINFO_CHARS)
+            if port == "":
+                port = None
+        else:
+            auth, host, port = None, None, None
+
+        if port is not None:
+            port = int(port)
+            if not (0 <= port <= 65535):
+                raise LocationParseError(url)
+
+        host = _normalize_host(host, scheme)
+
+        if normalize_uri and path:
+            path = _remove_path_dot_segments(path)
+            path = _encode_invalid_chars(path, PATH_CHARS)
+        if normalize_uri and query:
+            query = _encode_invalid_chars(query, QUERY_CHARS)
+        if normalize_uri and fragment:
+            fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS)
+
+    except (ValueError, AttributeError):
+        return six.raise_from(LocationParseError(source_url), None)
+
+    # For the sake of backwards compatibility we put empty
+    # string values for path if there are any defined values
+    # beyond the path in the URL.
+    # TODO: Remove this when we break backwards compatibility.
+    if not path:
+        if query is not None or fragment is not None:
+            path = ""
+        else:
+            path = None
+
+    # Ensure that each part of the URL is a `str` for
+    # backwards compatibility.
+    if isinstance(url, six.text_type):
+        ensure_func = six.ensure_text
+    else:
+        ensure_func = six.ensure_str
+
+    def ensure_type(x):
+        return x if x is None else ensure_func(x)
+
+    return Url(
+        scheme=ensure_type(scheme),
+        auth=ensure_type(auth),
+        host=ensure_type(host),
+        port=port,
+        path=ensure_type(path),
+        query=ensure_type(query),
+        fragment=ensure_type(fragment),
+    )
+
+
+def get_host(url):
+    """
+    Deprecated. Use :func:`parse_url` instead.
+    """
+    p = parse_url(url)
+    return p.scheme or "http", p.hostname, p.port
diff --git a/venv/lib/python3.7/site-packages/urllib3/util/wait.py b/venv/lib/python3.7/site-packages/urllib3/util/wait.py
new file mode 100644
index 00000000..c280646c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/urllib3/util/wait.py
@@ -0,0 +1,153 @@
+import errno
+import select
+import sys
+from functools import partial
+
+try:
+    from time import monotonic
+except ImportError:
+    from time import time as monotonic
+
+__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
+
+
+class NoWayToWaitForSocketError(Exception):
+    pass
+
+
+# How should we wait on sockets?
+#
+# There are two types of APIs you can use for waiting on sockets: the fancy
+# modern stateful APIs like epoll/kqueue, and the older stateless APIs like
+# select/poll. The stateful APIs are more efficient when you have a lot of
+# sockets to keep track of, because you can set them up once and then use them
+# lots of times. But we only ever want to wait on a single socket at a time
+# and don't want to keep track of state, so the stateless APIs are actually
+# more efficient. So we want to use select() or poll().
+#
+# Now, how do we choose between select() and poll()? On traditional Unixes,
+# select() has a strange calling convention that makes it slow, or fail
+# altogether, for high-numbered file descriptors. The point of poll() is to fix
+# that, so on Unixes, we prefer poll().
+#
+# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
+# for it), but that's OK, because on Windows, select() doesn't have this
+# strange calling convention; plain select() works fine.
+#
+# So: on Windows we use select(), and everywhere else we use poll(). We also
+# fall back to select() in case poll() is somehow broken or missing.
+
+if sys.version_info >= (3, 5):
+    # Modern Python, which retries syscalls by default
+    def _retry_on_intr(fn, timeout):
+        return fn(timeout)
+
+
+else:
+    # Old and broken Pythons.
+    def _retry_on_intr(fn, timeout):
+        if timeout is None:
+            deadline = float("inf")
+        else:
+            deadline = monotonic() + timeout
+
+        while True:
+            try:
+                return fn(timeout)
+            # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
+            except (OSError, select.error) as e:
+                # 'e.args[0]' incantation works for both OSError and select.error
+                if e.args[0] != errno.EINTR:
+                    raise
+                else:
+                    timeout = deadline - monotonic()
+                    if timeout < 0:
+                        timeout = 0
+                    if timeout == float("inf"):
+                        timeout = None
+                    continue
+
+
+def select_wait_for_socket(sock, read=False, write=False, timeout=None):
+    if not read and not write:
+        raise RuntimeError("must specify at least one of read=True, write=True")
+    rcheck = []
+    wcheck = []
+    if read:
+        rcheck.append(sock)
+    if write:
+        wcheck.append(sock)
+    # When doing a non-blocking connect, most systems signal success by
+    # marking the socket writable. Windows, though, signals success by marking
+    # it as "exceptional". We paper over the difference by checking the write
+    # sockets for both conditions. (The stdlib selectors module does the same
+    # thing.)
+    fn = partial(select.select, rcheck, wcheck, wcheck)
+    rready, wready, xready = _retry_on_intr(fn, timeout)
+    return bool(rready or wready or xready)
+
+
+def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
+    if not read and not write:
+        raise RuntimeError("must specify at least one of read=True, write=True")
+    mask = 0
+    if read:
+        mask |= select.POLLIN
+    if write:
+        mask |= select.POLLOUT
+    poll_obj = select.poll()
+    poll_obj.register(sock, mask)
+
+    # For some reason, poll() takes timeout in milliseconds
+    def do_poll(t):
+        if t is not None:
+            t *= 1000
+        return poll_obj.poll(t)
+
+    return bool(_retry_on_intr(do_poll, timeout))
+
+
+def null_wait_for_socket(*args, **kwargs):
+    raise NoWayToWaitForSocketError("no select-equivalent available")
+
+
+def _have_working_poll():
+    # Apparently some systems have a select.poll that fails as soon as you try
+    # to use it, either due to strange configuration or broken monkeypatching
+    # from libraries like eventlet/greenlet.
+    try:
+        poll_obj = select.poll()
+        _retry_on_intr(poll_obj.poll, 0)
+    except (AttributeError, OSError):
+        return False
+    else:
+        return True
+
+
+def wait_for_socket(*args, **kwargs):
+    # We delay choosing which implementation to use until the first time we're
+    # called. We could do it at import time, but then we might make the wrong
+    # decision if someone goes wild with monkeypatching select.poll after
+    # we're imported.
+    global wait_for_socket
+    if _have_working_poll():
+        wait_for_socket = poll_wait_for_socket
+    elif hasattr(select, "select"):
+        wait_for_socket = select_wait_for_socket
+    else:  # Platform-specific: Appengine.
+        wait_for_socket = null_wait_for_socket
+    return wait_for_socket(*args, **kwargs)
+
+
+def wait_for_read(sock, timeout=None):
+    """Waits for reading to be available on a given socket.
+    Returns True if the socket is readable, or False if the timeout expired.
+    """
+    return wait_for_socket(sock, read=True, timeout=timeout)
+
+
+def wait_for_write(sock, timeout=None):
+    """Waits for writing to be available on a given socket.
+    Returns True if the socket is writable, or False if the timeout expired.
+    """
+    return wait_for_socket(sock, write=True, timeout=timeout)
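A minimal sketch of the two public helpers, using a local socket pair
(socket.socketpair() is POSIX-only before Python 3.5):

    import socket
    from urllib3.util.wait import wait_for_read, wait_for_write

    a, b = socket.socketpair()
    print(wait_for_read(a, timeout=0.1))   # False: nothing buffered yet
    b.sendall(b"ping")
    print(wait_for_read(a, timeout=0.1))   # True: data is waiting
    print(wait_for_write(a, timeout=0.1))  # True: send buffer has room
    a.close()
    b.close()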
diff --git a/venv/lib/python3.7/site-packages/werkzeug/__init__.py b/venv/lib/python3.7/site-packages/werkzeug/__init__.py
new file mode 100644
index 00000000..5a71a379
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/__init__.py
@@ -0,0 +1,6 @@
+from .serving import run_simple as run_simple
+from .test import Client as Client
+from .wrappers import Request as Request
+from .wrappers import Response as Response
+
+__version__ = "2.0.1"
diff --git a/venv/lib/python3.7/site-packages/werkzeug/_internal.py b/venv/lib/python3.7/site-packages/werkzeug/_internal.py
new file mode 100644
index 00000000..7d33563e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/_internal.py
@@ -0,0 +1,626 @@
+import inspect
+import logging
+import operator
+import re
+import string
+import sys
+import typing
+import typing as t
+from datetime import date
+from datetime import datetime
+from datetime import timezone
+from itertools import chain
+from weakref import WeakKeyDictionary
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+    from .wrappers.request import Request  # noqa: F401
+
+_logger: t.Optional[logging.Logger] = None
+_signature_cache = WeakKeyDictionary()  # type: ignore
+_epoch_ord = date(1970, 1, 1).toordinal()
+_legal_cookie_chars = frozenset(
+    c.encode("ascii")
+    for c in f"{string.ascii_letters}{string.digits}/=!#$%&'*+-.^_`|~:"
+)
+
+_cookie_quoting_map = {b",": b"\\054", b";": b"\\073", b'"': b'\\"', b"\\": b"\\\\"}
+for _i in chain(range(32), range(127, 256)):
+    _cookie_quoting_map[_i.to_bytes(1, sys.byteorder)] = f"\\{_i:03o}".encode("latin1")
+
+_octal_re = re.compile(br"\\[0-3][0-7][0-7]")
+_quote_re = re.compile(br"[\\].")
+_legal_cookie_chars_re = br"[\w\d!#%&\'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]"
+_cookie_re = re.compile(
+    br"""
+    (?P<key>[^=;]+)
+    (?:\s*=\s*
+        (?P<val>
+            "(?:[^\\"]|\\.)*" |
+             (?:.*?)
+        )
+    )?
+    \s*;
+""",
+    flags=re.VERBOSE,
+)
+
+
+class _Missing:
+    def __repr__(self) -> str:
+        return "no value"
+
+    def __reduce__(self) -> str:
+        return "_missing"
+
+
+_missing = _Missing()
+
+
+@typing.overload
+def _make_encode_wrapper(reference: str) -> t.Callable[[str], str]:
+    ...
+
+
+@typing.overload
+def _make_encode_wrapper(reference: bytes) -> t.Callable[[str], bytes]:
+    ...
+
+
+def _make_encode_wrapper(reference: t.AnyStr) -> t.Callable[[str], t.AnyStr]:
+    """Create a function that will be called with a string argument. If
+    the reference is bytes, values will be encoded to bytes.
+    """
+    if isinstance(reference, str):
+        return lambda x: x
+
+    return operator.methodcaller("encode", "latin1")
+
+
+def _check_str_tuple(value: t.Tuple[t.AnyStr, ...]) -> None:
+    """Ensure tuple items are all strings or all bytes."""
+    if not value:
+        return
+
+    item_type = str if isinstance(value[0], str) else bytes
+
+    if any(not isinstance(item, item_type) for item in value):
+        raise TypeError(f"Cannot mix str and bytes arguments (got {value!r})")
+
+
+_default_encoding = sys.getdefaultencoding()
+
+
+def _to_bytes(
+    x: t.Union[str, bytes], charset: str = _default_encoding, errors: str = "strict"
+) -> bytes:
+    if x is None or isinstance(x, bytes):
+        return x
+
+    if isinstance(x, (bytearray, memoryview)):
+        return bytes(x)
+
+    if isinstance(x, str):
+        return x.encode(charset, errors)
+
+    raise TypeError("Expected bytes")
+
+
+@typing.overload
+def _to_str(  # type: ignore
+    x: None,
+    charset: t.Optional[str] = ...,
+    errors: str = ...,
+    allow_none_charset: bool = ...,
+) -> None:
+    ...
+
+
+@typing.overload
+def _to_str(
+    x: t.Any,
+    charset: t.Optional[str] = ...,
+    errors: str = ...,
+    allow_none_charset: bool = ...,
+) -> str:
+    ...
+
+
+def _to_str(
+    x: t.Optional[t.Any],
+    charset: t.Optional[str] = _default_encoding,
+    errors: str = "strict",
+    allow_none_charset: bool = False,
+) -> t.Optional[t.Union[str, bytes]]:
+    if x is None or isinstance(x, str):
+        return x
+
+    if not isinstance(x, (bytes, bytearray)):
+        return str(x)
+
+    if charset is None:
+        if allow_none_charset:
+            return x
+
+    return x.decode(charset, errors)  # type: ignore
+
+
+def _wsgi_decoding_dance(
+    s: str, charset: str = "utf-8", errors: str = "replace"
+) -> str:
+    return s.encode("latin1").decode(charset, errors)
+
+
+def _wsgi_encoding_dance(
+    s: str, charset: str = "utf-8", errors: str = "replace"
+) -> str:
+    if isinstance(s, bytes):
+        return s.decode("latin1", errors)
+
+    return s.encode(charset).decode("latin1", errors)
+
+
+def _get_environ(obj: t.Union["WSGIEnvironment", "Request"]) -> "WSGIEnvironment":
+    env = getattr(obj, "environ", obj)
+    assert isinstance(
+        env, dict
+    ), f"{type(obj).__name__!r} is not a WSGI environment (has to be a dict)"
+    return env
+
+
+def _has_level_handler(logger: logging.Logger) -> bool:
+    """Check if there is a handler in the logging chain that will handle
+    the given logger's effective level.
+    """
+    level = logger.getEffectiveLevel()
+    current = logger
+
+    while current:
+        if any(handler.level <= level for handler in current.handlers):
+            return True
+
+        if not current.propagate:
+            break
+
+        current = current.parent  # type: ignore
+
+    return False
+
+
+class _ColorStreamHandler(logging.StreamHandler):
+    """On Windows, wrap stream with Colorama for ANSI style support."""
+
+    def __init__(self) -> None:
+        try:
+            import colorama
+        except ImportError:
+            stream = None
+        else:
+            stream = colorama.AnsiToWin32(sys.stderr)
+
+        super().__init__(stream)
+
+
+def _log(type: str, message: str, *args: t.Any, **kwargs: t.Any) -> None:
+    """Log a message to the 'werkzeug' logger.
+
+    The logger is created the first time it is needed. If there is no
+    level set, it is set to :data:`logging.INFO`. If there is no handler
+    for the logger's effective level, a :class:`logging.StreamHandler`
+    is added.
+    """
+    global _logger
+
+    if _logger is None:
+        _logger = logging.getLogger("werkzeug")
+
+        if _logger.level == logging.NOTSET:
+            _logger.setLevel(logging.INFO)
+
+        if not _has_level_handler(_logger):
+            _logger.addHandler(_ColorStreamHandler())
+
+    getattr(_logger, type)(message.rstrip(), *args, **kwargs)
+
+
+def _parse_signature(func):  # type: ignore
+    """Return a signature object for the function.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1 along with ``utils.bind`` and
+        ``validate_arguments``.
+    """
+    # if we have a cached validator for this function, return it
+    parse = _signature_cache.get(func)
+    if parse is not None:
+        return parse
+
+    # inspect the function signature and collect all the information
+    tup = inspect.getfullargspec(func)
+    positional, vararg_var, kwarg_var, defaults = tup[:4]
+    defaults = defaults or ()
+    arg_count = len(positional)
+    arguments = []
+    for idx, name in enumerate(positional):
+        if isinstance(name, list):
+            raise TypeError(
+                "cannot parse functions that unpack tuples in the function signature"
+            )
+        try:
+            default = defaults[idx - arg_count]
+        except IndexError:
+            param = (name, False, None)
+        else:
+            param = (name, True, default)
+        arguments.append(param)
+    arguments = tuple(arguments)
+
+    def parse(args, kwargs):  # type: ignore
+        new_args = []
+        missing = []
+        extra = {}
+
+        # consume as many arguments as positional as possible
+        for idx, (name, has_default, default) in enumerate(arguments):
+            try:
+                new_args.append(args[idx])
+            except IndexError:
+                try:
+                    new_args.append(kwargs.pop(name))
+                except KeyError:
+                    if has_default:
+                        new_args.append(default)
+                    else:
+                        missing.append(name)
+            else:
+                if name in kwargs:
+                    extra[name] = kwargs.pop(name)
+
+        # handle extra arguments
+        extra_positional = args[arg_count:]
+        if vararg_var is not None:
+            new_args.extend(extra_positional)
+            extra_positional = ()
+        if kwargs and kwarg_var is None:
+            extra.update(kwargs)
+            kwargs = {}
+
+        return (
+            new_args,
+            kwargs,
+            missing,
+            extra,
+            extra_positional,
+            arguments,
+            vararg_var,
+            kwarg_var,
+        )
+
+    _signature_cache[func] = parse
+    return parse
+
+
+@typing.overload
+def _dt_as_utc(dt: None) -> None:
+    ...
+
+
+@typing.overload
+def _dt_as_utc(dt: datetime) -> datetime:
+    ...
+
+
+def _dt_as_utc(dt: t.Optional[datetime]) -> t.Optional[datetime]:
+    if dt is None:
+        return dt
+
+    if dt.tzinfo is None:
+        return dt.replace(tzinfo=timezone.utc)
+    elif dt.tzinfo != timezone.utc:
+        return dt.astimezone(timezone.utc)
+
+    return dt
+
+
+_TAccessorValue = t.TypeVar("_TAccessorValue")
+
+
+class _DictAccessorProperty(t.Generic[_TAccessorValue]):
+    """Baseclass for `environ_property` and `header_property`."""
+
+    read_only = False
+
+    def __init__(
+        self,
+        name: str,
+        default: t.Optional[_TAccessorValue] = None,
+        load_func: t.Optional[t.Callable[[str], _TAccessorValue]] = None,
+        dump_func: t.Optional[t.Callable[[_TAccessorValue], str]] = None,
+        read_only: t.Optional[bool] = None,
+        doc: t.Optional[str] = None,
+    ) -> None:
+        self.name = name
+        self.default = default
+        self.load_func = load_func
+        self.dump_func = dump_func
+        if read_only is not None:
+            self.read_only = read_only
+        self.__doc__ = doc
+
+    def lookup(self, instance: t.Any) -> t.MutableMapping[str, t.Any]:
+        raise NotImplementedError
+
+    @typing.overload
+    def __get__(
+        self, instance: None, owner: type
+    ) -> "_DictAccessorProperty[_TAccessorValue]":
+        ...
+
+    @typing.overload
+    def __get__(self, instance: t.Any, owner: type) -> _TAccessorValue:
+        ...
+
+    def __get__(
+        self, instance: t.Optional[t.Any], owner: type
+    ) -> t.Union[_TAccessorValue, "_DictAccessorProperty[_TAccessorValue]"]:
+        if instance is None:
+            return self
+
+        storage = self.lookup(instance)
+
+        if self.name not in storage:
+            return self.default  # type: ignore
+
+        value = storage[self.name]
+
+        if self.load_func is not None:
+            try:
+                return self.load_func(value)
+            except (ValueError, TypeError):
+                return self.default  # type: ignore
+
+        return value  # type: ignore
+
+    def __set__(self, instance: t.Any, value: _TAccessorValue) -> None:
+        if self.read_only:
+            raise AttributeError("read only property")
+
+        if self.dump_func is not None:
+            self.lookup(instance)[self.name] = self.dump_func(value)
+        else:
+            self.lookup(instance)[self.name] = value
+
+    def __delete__(self, instance: t.Any) -> None:
+        if self.read_only:
+            raise AttributeError("read only property")
+
+        self.lookup(instance).pop(self.name, None)
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self.name}>"
+
+
+def _cookie_quote(b: bytes) -> bytes:
+    buf = bytearray()
+    all_legal = True
+    _lookup = _cookie_quoting_map.get
+    _push = buf.extend
+
+    for char_int in b:
+        char = char_int.to_bytes(1, sys.byteorder)
+        if char not in _legal_cookie_chars:
+            all_legal = False
+            char = _lookup(char, char)
+        _push(char)
+
+    if all_legal:
+        return bytes(buf)
+    return bytes(b'"' + buf + b'"')
+
+
+def _cookie_unquote(b: bytes) -> bytes:
+    if len(b) < 2:
+        return b
+    if b[:1] != b'"' or b[-1:] != b'"':
+        return b
+
+    b = b[1:-1]
+
+    i = 0
+    n = len(b)
+    rv = bytearray()
+    _push = rv.extend
+
+    while 0 <= i < n:
+        o_match = _octal_re.search(b, i)
+        q_match = _quote_re.search(b, i)
+        if not o_match and not q_match:
+            rv.extend(b[i:])
+            break
+        j = k = -1
+        if o_match:
+            j = o_match.start(0)
+        if q_match:
+            k = q_match.start(0)
+        if q_match and (not o_match or k < j):
+            _push(b[i:k])
+            _push(b[k + 1 : k + 2])
+            i = k + 2
+        else:
+            _push(b[i:j])
+            rv.append(int(b[j + 1 : j + 4], 8))
+            i = j + 4
+
+    return bytes(rv)
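The quote/unquote pair round-trips; a sketch with illustrative values (the
second print shows the bytes repr, with doubled backslashes):

    print(_cookie_quote(b"plain-value"))  # b'plain-value': all chars legal
    quoted = _cookie_quote(b'a;b,"c"')    # illegal chars escaped, value quoted
    print(quoted)                         # b'"a\\073b\\054\\"c\\""'
    print(_cookie_unquote(quoted))        # b'a;b,"c"'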
+
+
+def _cookie_parse_impl(b: bytes) -> t.Iterator[t.Tuple[bytes, bytes]]:
+    """Lowlevel cookie parsing facility that operates on bytes."""
+    i = 0
+    n = len(b)
+
+    while i < n:
+        match = _cookie_re.search(b + b";", i)
+        if not match:
+            break
+
+        key = match.group("key").strip()
+        value = match.group("val") or b""
+        i = match.end(0)
+
+        yield key, _cookie_unquote(value)
+
+
+def _encode_idna(domain: str) -> bytes:
+    # If we're given bytes, make sure they fit into ASCII
+    if isinstance(domain, bytes):
+        domain.decode("ascii")
+        return domain
+
+    # Otherwise check if it's already ascii, then return
+    try:
+        return domain.encode("ascii")
+    except UnicodeError:
+        pass
+
+    # Otherwise encode each part separately
+    return b".".join(p.encode("idna") for p in domain.split("."))
+
+
+def _decode_idna(domain: t.Union[str, bytes]) -> str:
+    # If the input is a string try to encode it to ascii to do the idna
+    # decoding. If that fails because of a unicode error, then we
+    # already have a decoded idna domain.
+    if isinstance(domain, str):
+        try:
+            domain = domain.encode("ascii")
+        except UnicodeError:
+            return domain  # type: ignore
+
+    # Decode each part separately. If a part fails, try to decode it
+    # with ascii and silently ignore errors. This makes sense because
+    # the idna codec does not have error handling.
+    def decode_part(part: bytes) -> str:
+        try:
+            return part.decode("idna")
+        except UnicodeError:
+            return part.decode("ascii", "ignore")
+
+    return ".".join(decode_part(p) for p in domain.split(b"."))
+
+
+@typing.overload
+def _make_cookie_domain(domain: None) -> None:
+    ...
+
+
+@typing.overload
+def _make_cookie_domain(domain: str) -> bytes:
+    ...
+
+
+def _make_cookie_domain(domain: t.Optional[str]) -> t.Optional[bytes]:
+    if domain is None:
+        return None
+    domain = _encode_idna(domain)
+    if b":" in domain:
+        domain = domain.split(b":", 1)[0]
+    if b"." in domain:
+        return domain
+    raise ValueError(
+        "Setting 'domain' for a cookie on a server running locally (ex: "
+        "localhost) is not supported by complying browsers. You should "
+        "have something like: '127.0.0.1 localhost dev.localhost' on "
+        "your hosts file and then point your server to run on "
+        "'dev.localhost' and also set 'domain' for 'dev.localhost'"
+    )
+
+
+def _easteregg(app: t.Optional["WSGIApplication"] = None) -> "WSGIApplication":
+    """Like the name says.  But who knows how it works?"""
+
+    def bzzzzzzz(gyver: bytes) -> str:
+        import base64
+        import zlib
+
+        return zlib.decompress(base64.b64decode(gyver)).decode("ascii")
+
+    gyver = "\n".join(
+        [
+            x + (77 - len(x)) * " "
+            for x in bzzzzzzz(
+                b"""
+eJyFlzuOJDkMRP06xRjymKgDJCDQStBYT8BCgK4gTwfQ2fcFs2a2FzvZk+hvlcRvRJD148efHt9m
+9Xz94dRY5hGt1nrYcXx7us9qlcP9HHNh28rz8dZj+q4rynVFFPdlY4zH873NKCexrDM6zxxRymzz
+4QIxzK4bth1PV7+uHn6WXZ5C4ka/+prFzx3zWLMHAVZb8RRUxtFXI5DTQ2n3Hi2sNI+HK43AOWSY
+jmEzE4naFp58PdzhPMdslLVWHTGUVpSxImw+pS/D+JhzLfdS1j7PzUMxij+mc2U0I9zcbZ/HcZxc
+q1QjvvcThMYFnp93agEx392ZdLJWXbi/Ca4Oivl4h/Y1ErEqP+lrg7Xa4qnUKu5UE9UUA4xeqLJ5
+jWlPKJvR2yhRI7xFPdzPuc6adXu6ovwXwRPXXnZHxlPtkSkqWHilsOrGrvcVWXgGP3daXomCj317
+8P2UOw/NnA0OOikZyFf3zZ76eN9QXNwYdD8f8/LdBRFg0BO3bB+Pe/+G8er8tDJv83XTkj7WeMBJ
+v/rnAfdO51d6sFglfi8U7zbnr0u9tyJHhFZNXYfH8Iafv2Oa+DT6l8u9UYlajV/hcEgk1x8E8L/r
+XJXl2SK+GJCxtnyhVKv6GFCEB1OO3f9YWAIEbwcRWv/6RPpsEzOkXURMN37J0PoCSYeBnJQd9Giu
+LxYQJNlYPSo/iTQwgaihbART7Fcyem2tTSCcwNCs85MOOpJtXhXDe0E7zgZJkcxWTar/zEjdIVCk
+iXy87FW6j5aGZhttDBoAZ3vnmlkx4q4mMmCdLtnHkBXFMCReqthSGkQ+MDXLLCpXwBs0t+sIhsDI
+tjBB8MwqYQpLygZ56rRHHpw+OAVyGgaGRHWy2QfXez+ZQQTTBkmRXdV/A9LwH6XGZpEAZU8rs4pE
+1R4FQ3Uwt8RKEtRc0/CrANUoes3EzM6WYcFyskGZ6UTHJWenBDS7h163Eo2bpzqxNE9aVgEM2CqI
+GAJe9Yra4P5qKmta27VjzYdR04Vc7KHeY4vs61C0nbywFmcSXYjzBHdiEjraS7PGG2jHHTpJUMxN
+Jlxr3pUuFvlBWLJGE3GcA1/1xxLcHmlO+LAXbhrXah1tD6Ze+uqFGdZa5FM+3eHcKNaEarutAQ0A
+QMAZHV+ve6LxAwWnXbbSXEG2DmCX5ijeLCKj5lhVFBrMm+ryOttCAeFpUdZyQLAQkA06RLs56rzG
+8MID55vqr/g64Qr/wqwlE0TVxgoiZhHrbY2h1iuuyUVg1nlkpDrQ7Vm1xIkI5XRKLedN9EjzVchu
+jQhXcVkjVdgP2O99QShpdvXWoSwkp5uMwyjt3jiWCqWGSiaaPAzohjPanXVLbM3x0dNskJsaCEyz
+DTKIs+7WKJD4ZcJGfMhLFBf6hlbnNkLEePF8Cx2o2kwmYF4+MzAxa6i+6xIQkswOqGO+3x9NaZX8
+MrZRaFZpLeVTYI9F/djY6DDVVs340nZGmwrDqTCiiqD5luj3OzwpmQCiQhdRYowUYEA3i1WWGwL4
+GCtSoO4XbIPFeKGU13XPkDf5IdimLpAvi2kVDVQbzOOa4KAXMFlpi/hV8F6IDe0Y2reg3PuNKT3i
+RYhZqtkQZqSB2Qm0SGtjAw7RDwaM1roESC8HWiPxkoOy0lLTRFG39kvbLZbU9gFKFRvixDZBJmpi
+Xyq3RE5lW00EJjaqwp/v3EByMSpVZYsEIJ4APaHmVtpGSieV5CALOtNUAzTBiw81GLgC0quyzf6c
+NlWknzJeCsJ5fup2R4d8CYGN77mu5vnO1UqbfElZ9E6cR6zbHjgsr9ly18fXjZoPeDjPuzlWbFwS
+pdvPkhntFvkc13qb9094LL5NrA3NIq3r9eNnop9DizWOqCEbyRBFJTHn6Tt3CG1o8a4HevYh0XiJ
+sR0AVVHuGuMOIfbuQ/OKBkGRC6NJ4u7sbPX8bG/n5sNIOQ6/Y/BX3IwRlTSabtZpYLB85lYtkkgm
+p1qXK3Du2mnr5INXmT/78KI12n11EFBkJHHp0wJyLe9MvPNUGYsf+170maayRoy2lURGHAIapSpQ
+krEDuNoJCHNlZYhKpvw4mspVWxqo415n8cD62N9+EfHrAvqQnINStetek7RY2Urv8nxsnGaZfRr/
+nhXbJ6m/yl1LzYqscDZA9QHLNbdaSTTr+kFg3bC0iYbX/eQy0Bv3h4B50/SGYzKAXkCeOLI3bcAt
+mj2Z/FM1vQWgDynsRwNvrWnJHlespkrp8+vO1jNaibm+PhqXPPv30YwDZ6jApe3wUjFQobghvW9p
+7f2zLkGNv8b191cD/3vs9Q833z8t"""
+            ).splitlines()
+        ]
+    )
+
+    def easteregged(
+        environ: "WSGIEnvironment", start_response: "StartResponse"
+    ) -> t.Iterable[bytes]:
+        def injecting_start_response(
+            status: str, headers: t.List[t.Tuple[str, str]], exc_info: t.Any = None
+        ) -> t.Callable[[bytes], t.Any]:
+            headers.append(("X-Powered-By", "Werkzeug"))
+            return start_response(status, headers, exc_info)
+
+        if app is not None and environ.get("QUERY_STRING") != "macgybarchakku":
+            return app(environ, injecting_start_response)
+        injecting_start_response("200 OK", [("Content-Type", "text/html")])
+        return [
+            f"""\
+<!DOCTYPE html>
+<html>
+<head>
+<title>About Werkzeug</title>
+<style type="text/css">
+  body {{ font: 15px Georgia, serif; text-align: center; }}
+  a {{ color: #333; text-decoration: none; }}
+  h1 {{ font-size: 30px; margin: 20px 0 10px 0; }}
+  p {{ margin: 0 0 30px 0; }}
+  pre {{ font: 11px 'Consolas', 'Monaco', monospace; line-height: 0.95; }}
+</style>
+</head>
+<body>
+<h1><a href="http://werkzeug.pocoo.org/">Werkzeug</a></h1>
+<p>the Swiss Army knife of Python web development.</p>
+<pre>{gyver}\n\n\n</pre>
+</body>
+</html>""".encode(
+                "latin1"
+            )
+        ]
+
+    return easteregged
diff --git a/venv/lib/python3.7/site-packages/werkzeug/_reloader.py b/venv/lib/python3.7/site-packages/werkzeug/_reloader.py
new file mode 100644
index 00000000..ab34533d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/_reloader.py
@@ -0,0 +1,430 @@
+import fnmatch
+import os
+import subprocess
+import sys
+import threading
+import time
+import typing as t
+from itertools import chain
+from pathlib import PurePath
+
+from ._internal import _log
+
+# The various system prefixes where imports are found. Base values are
+# different when running in a virtualenv. The stat reloader won't scan
+# these directories; scanning them would be too inefficient.
+prefix = {sys.prefix, sys.base_prefix, sys.exec_prefix, sys.base_exec_prefix}
+
+if hasattr(sys, "real_prefix"):
+    # virtualenv < 20
+    prefix.add(sys.real_prefix)  # type: ignore
+
+_ignore_prefixes = tuple(prefix)
+del prefix
+
+
+def _iter_module_paths() -> t.Iterator[str]:
+    """Find the filesystem paths associated with imported modules."""
+    # Wrap in list() in case sys.modules changes while we iterate.
+    for module in list(sys.modules.values()):
+        name = getattr(module, "__file__", None)
+
+        if name is None:
+            continue
+
+        while not os.path.isfile(name):
+            # Zip file, find the base file without the module path.
+            old = name
+            name = os.path.dirname(name)
+
+            if name == old:  # skip if it was all directories somehow
+                break
+        else:
+            yield name
+
+
+def _remove_by_pattern(paths: t.Set[str], exclude_patterns: t.Set[str]) -> None:
+    for pattern in exclude_patterns:
+        paths.difference_update(fnmatch.filter(paths, pattern))
+
+
+def _find_stat_paths(
+    extra_files: t.Set[str], exclude_patterns: t.Set[str]
+) -> t.Iterable[str]:
+    """Find paths for the stat reloader to watch. Returns imported
+    module files and Python files under non-system paths. Extra files and
+    Python files under extra directories can also be scanned.
+
+    System paths have to be excluded for efficiency. Non-system paths,
+    such as a project root or ``sys.path.insert``, should be the paths
+    of interest to the user anyway.
+    """
+    paths = set()
+
+    for path in chain(list(sys.path), extra_files):
+        path = os.path.abspath(path)
+
+        if os.path.isfile(path):
+            # zip file on sys.path, or extra file
+            paths.add(path)
+
+        for root, dirs, files in os.walk(path):
+            # Ignore system prefixes for efficiency. Don't scan
+            # __pycache__, it will have a py or pyc module at the import
+            # path. As an optimization, ignore .git and .hg since
+            # nothing interesting will be there.
+            if root.startswith(_ignore_prefixes) or os.path.basename(root) in {
+                "__pycache__",
+                ".git",
+                ".hg",
+            }:
+                dirs.clear()
+                continue
+
+            for name in files:
+                if name.endswith((".py", ".pyc")):
+                    paths.add(os.path.join(root, name))
+
+    paths.update(_iter_module_paths())
+    _remove_by_pattern(paths, exclude_patterns)
+    return paths
+
+
+def _find_watchdog_paths(
+    extra_files: t.Set[str], exclude_patterns: t.Set[str]
+) -> t.Iterable[str]:
+    """Find paths for the stat reloader to watch. Looks at the same
+    sources as the stat reloader, but watches everything under
+    directories instead of individual files.
+    """
+    dirs = set()
+
+    for name in chain(list(sys.path), extra_files):
+        name = os.path.abspath(name)
+
+        if os.path.isfile(name):
+            name = os.path.dirname(name)
+
+        dirs.add(name)
+
+    for name in _iter_module_paths():
+        dirs.add(os.path.dirname(name))
+
+    _remove_by_pattern(dirs, exclude_patterns)
+    return _find_common_roots(dirs)
+
+
+def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]:
+    root: t.Dict[str, dict] = {}
+
+    for chunks in sorted((PurePath(x).parts for x in paths), key=len, reverse=True):
+        node = root
+
+        for chunk in chunks:
+            node = node.setdefault(chunk, {})
+
+        node.clear()
+
+    rv = set()
+
+    def _walk(node: t.Mapping[str, dict], path: t.Tuple[str, ...]) -> None:
+        for prefix, child in node.items():
+            _walk(child, path + (prefix,))
+
+        if not node:
+            rv.add(os.path.join(*path))
+
+    _walk(root, ())
+    return rv
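For instance, nested directories collapse into their common parents (a
sketch with hypothetical POSIX paths):

    paths = {"/srv/app/web", "/srv/app/web/static", "/srv/app/api", "/tmp/x"}
    print(sorted(_find_common_roots(paths)))
    # ['/srv/app/api', '/srv/app/web', '/tmp/x']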
+
+
+def _get_args_for_reloading() -> t.List[str]:
+    """Determine how the script was executed, and return the args needed
+    to execute it again in a new process.
+    """
+    rv = [sys.executable]
+    py_script = sys.argv[0]
+    args = sys.argv[1:]
+    # Need to look at main module to determine how it was executed.
+    __main__ = sys.modules["__main__"]
+
+    # The value of __package__ indicates how Python was called. It may
+    # not exist if a setuptools script is installed as an egg. It may be
+    # set incorrectly for entry points created with pip on Windows.
+    if getattr(__main__, "__package__", None) is None or (
+        os.name == "nt"
+        and __main__.__package__ == ""
+        and not os.path.exists(py_script)
+        and os.path.exists(f"{py_script}.exe")
+    ):
+        # Executed a file, like "python app.py".
+        py_script = os.path.abspath(py_script)
+
+        if os.name == "nt":
+            # Windows entry points have ".exe" extension and should be
+            # called directly.
+            if not os.path.exists(py_script) and os.path.exists(f"{py_script}.exe"):
+                py_script += ".exe"
+
+            if (
+                os.path.splitext(sys.executable)[1] == ".exe"
+                and os.path.splitext(py_script)[1] == ".exe"
+            ):
+                rv.pop(0)
+
+        rv.append(py_script)
+    else:
+        # Executed a module, like "python -m werkzeug.serving".
+        if sys.argv[0] == "-m":
+            # Flask works around previous behavior by putting
+            # "-m flask" in sys.argv.
+            # TODO remove this once Flask no longer misbehaves
+            args = sys.argv
+        else:
+            if os.path.isfile(py_script):
+                # Rewritten by Python from "-m script" to "/path/to/script.py".
+                py_module = t.cast(str, __main__.__package__)
+                name = os.path.splitext(os.path.basename(py_script))[0]
+
+                if name != "__main__":
+                    py_module += f".{name}"
+            else:
+                # Incorrectly rewritten by pydevd debugger from "-m script" to "script".
+                py_module = py_script
+
+            rv.extend(("-m", py_module.lstrip(".")))
+
+    rv.extend(args)
+    return rv
+
+
+class ReloaderLoop:
+    name = ""
+
+    def __init__(
+        self,
+        extra_files: t.Optional[t.Iterable[str]] = None,
+        exclude_patterns: t.Optional[t.Iterable[str]] = None,
+        interval: t.Union[int, float] = 1,
+    ) -> None:
+        self.extra_files: t.Set[str] = {os.path.abspath(x) for x in extra_files or ()}
+        self.exclude_patterns: t.Set[str] = set(exclude_patterns or ())
+        self.interval = interval
+
+    def __enter__(self) -> "ReloaderLoop":
+        """Do any setup, then run one step of the watch to populate the
+        initial filesystem state.
+        """
+        self.run_step()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):  # type: ignore
+        """Clean up any resources associated with the reloader."""
+        pass
+
+    def run(self) -> None:
+        """Continually run the watch step, sleeping for the configured
+        interval after each step.
+        """
+        while True:
+            self.run_step()
+            time.sleep(self.interval)
+
+    def run_step(self) -> None:
+        """Run one step for watching the filesystem. Called once to set
+        up initial state, then repeatedly to update it.
+        """
+        pass
+
+    def restart_with_reloader(self) -> int:
+        """Spawn a new Python interpreter with the same arguments as the
+        current one, but running the reloader thread.
+        """
+        while True:
+            _log("info", f" * Restarting with {self.name}")
+            args = _get_args_for_reloading()
+            new_environ = os.environ.copy()
+            new_environ["WERKZEUG_RUN_MAIN"] = "true"
+            exit_code = subprocess.call(args, env=new_environ, close_fds=False)
+
+            if exit_code != 3:
+                return exit_code
+
+    def trigger_reload(self, filename: str) -> None:
+        self.log_reload(filename)
+        sys.exit(3)
+
+    def log_reload(self, filename: str) -> None:
+        filename = os.path.abspath(filename)
+        _log("info", f" * Detected change in {filename!r}, reloading")
+
+
+class StatReloaderLoop(ReloaderLoop):
+    name = "stat"
+
+    def __enter__(self) -> ReloaderLoop:
+        self.mtimes: t.Dict[str, float] = {}
+        return super().__enter__()
+
+    def run_step(self) -> None:
+        for name in chain(_find_stat_paths(self.extra_files, self.exclude_patterns)):
+            try:
+                mtime = os.stat(name).st_mtime
+            except OSError:
+                continue
+
+            old_time = self.mtimes.get(name)
+
+            if old_time is None:
+                self.mtimes[name] = mtime
+                continue
+
+            if mtime > old_time:
+                self.trigger_reload(name)
+
+
+class WatchdogReloaderLoop(ReloaderLoop):
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        from watchdog.observers import Observer
+        from watchdog.events import PatternMatchingEventHandler
+
+        super().__init__(*args, **kwargs)
+        trigger_reload = self.trigger_reload
+
+        class EventHandler(PatternMatchingEventHandler):  # type: ignore
+            def on_any_event(self, event):  # type: ignore
+                trigger_reload(event.src_path)
+
+        reloader_name = Observer.__name__.lower()
+
+        if reloader_name.endswith("observer"):
+            reloader_name = reloader_name[:-8]
+
+        self.name = f"watchdog ({reloader_name})"
+        self.observer = Observer()
+        # Extra patterns can be non-Python files; match them in addition
+        # to all Python files in default and extra directories. Ignore
+        # __pycache__, since a change there is always accompanied by a
+        # change to the source file (or initial pyc file). Ignore Git and
+        # Mercurial internal changes.
+        extra_patterns = [p for p in self.extra_files if not os.path.isdir(p)]
+        self.event_handler = EventHandler(
+            patterns=["*.py", "*.pyc", "*.zip", *extra_patterns],
+            ignore_patterns=[
+                "*/__pycache__/*",
+                "*/.git/*",
+                "*/.hg/*",
+                *self.exclude_patterns,
+            ],
+        )
+        self.should_reload = False
+
+    def trigger_reload(self, filename: str) -> None:
+        # This is called inside an event handler, which means throwing
+        # SystemExit has no effect.
+        # https://github.com/gorakhargosh/watchdog/issues/294
+        self.should_reload = True
+        self.log_reload(filename)
+
+    def __enter__(self) -> ReloaderLoop:
+        self.watches: t.Dict[str, t.Any] = {}
+        self.observer.start()
+        return super().__enter__()
+
+    def __exit__(self, exc_type, exc_val, exc_tb):  # type: ignore
+        self.observer.stop()
+        self.observer.join()
+
+    def run(self) -> None:
+        while not self.should_reload:
+            self.run_step()
+            time.sleep(self.interval)
+
+        sys.exit(3)
+
+    def run_step(self) -> None:
+        to_delete = set(self.watches)
+
+        for path in _find_watchdog_paths(self.extra_files, self.exclude_patterns):
+            if path not in self.watches:
+                try:
+                    self.watches[path] = self.observer.schedule(
+                        self.event_handler, path, recursive=True
+                    )
+                except OSError:
+                    # Clear this path from the list of watches. We don't want
+                    # the same error message showing again in the next
+                    # iteration.
+                    self.watches[path] = None
+
+            to_delete.discard(path)
+
+        for path in to_delete:
+            watch = self.watches.pop(path, None)
+
+            if watch is not None:
+                self.observer.unschedule(watch)
+
+
+reloader_loops: t.Dict[str, t.Type[ReloaderLoop]] = {
+    "stat": StatReloaderLoop,
+    "watchdog": WatchdogReloaderLoop,
+}
+
+try:
+    __import__("watchdog.observers")
+except ImportError:
+    reloader_loops["auto"] = reloader_loops["stat"]
+else:
+    reloader_loops["auto"] = reloader_loops["watchdog"]
+
+
+def ensure_echo_on() -> None:
+    """Ensure that echo mode is enabled. Some tools such as PDB disable
+    it which causes usability issues after a reload."""
+    # tcgetattr will fail if stdin isn't a tty
+    if sys.stdin is None or not sys.stdin.isatty():
+        return
+
+    try:
+        import termios
+    except ImportError:
+        return
+
+    attributes = termios.tcgetattr(sys.stdin)
+
+    if not attributes[3] & termios.ECHO:
+        attributes[3] |= termios.ECHO
+        termios.tcsetattr(sys.stdin, termios.TCSANOW, attributes)
+
+
+def run_with_reloader(
+    main_func: t.Callable[[], None],
+    extra_files: t.Optional[t.Iterable[str]] = None,
+    exclude_patterns: t.Optional[t.Iterable[str]] = None,
+    interval: t.Union[int, float] = 1,
+    reloader_type: str = "auto",
+) -> None:
+    """Run the given function in an independent Python interpreter."""
+    import signal
+
+    signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
+    reloader = reloader_loops[reloader_type](
+        extra_files=extra_files, exclude_patterns=exclude_patterns, interval=interval
+    )
+
+    try:
+        if os.environ.get("WERKZEUG_RUN_MAIN") == "true":
+            ensure_echo_on()
+            t = threading.Thread(target=main_func, args=())
+            t.daemon = True
+
+            # Enter the reloader to set up initial state, then start
+            # the app thread and reloader update loop.
+            with reloader:
+                t.start()
+                reloader.run()
+        else:
+            sys.exit(reloader.restart_with_reloader())
+    except KeyboardInterrupt:
+        pass
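A minimal sketch of driving the reloader directly (note this is a private
module; werkzeug.serving.run_simple wires it up the same way):

    from werkzeug._reloader import run_with_reloader

    def main() -> None:
        # Hypothetical long-running app body; editing any watched .py
        # file makes the child exit with code 3 and get respawned.
        import time
        while True:
            time.sleep(1)

    if __name__ == "__main__":
        run_with_reloader(main, reloader_type="stat", interval=1)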
diff --git a/venv/lib/python3.7/site-packages/werkzeug/datastructures.py b/venv/lib/python3.7/site-packages/werkzeug/datastructures.py
new file mode 100644
index 00000000..5fb59281
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/datastructures.py
@@ -0,0 +1,3051 @@
+import base64
+import codecs
+import mimetypes
+import re
+import warnings
+from collections.abc import Collection
+from collections.abc import MutableSet
+from copy import deepcopy
+from io import BytesIO
+from itertools import repeat
+from os import fspath
+
+from . import exceptions
+from ._internal import _make_encode_wrapper
+from ._internal import _missing
+from .filesystem import get_filesystem_encoding
+
+
+def is_immutable(self):
+    raise TypeError(f"{type(self).__name__!r} objects are immutable")
+
+
+def iter_multi_items(mapping):
+    """Iterates over the items of a mapping yielding keys and values
+    without dropping any from more complex structures.
+    """
+    if isinstance(mapping, MultiDict):
+        yield from mapping.items(multi=True)
+    elif isinstance(mapping, dict):
+        for key, value in mapping.items():
+            if isinstance(value, (tuple, list)):
+                for v in value:
+                    yield key, v
+            else:
+                yield key, value
+    else:
+        yield from mapping
+
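+# Illustrative example (added): a plain dict with sequence values is
+# flattened into one ``(key, value)`` pair per element.
+#
+#     >>> list(iter_multi_items({"a": [1, 2], "b": 3}))
+#     [('a', 1), ('a', 2), ('b', 3)]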
+
+class ImmutableListMixin:
+    """Makes a :class:`list` immutable.
+
+    .. versionadded:: 0.5
+
+    :private:
+    """
+
+    _hash_cache = None
+
+    def __hash__(self):
+        if self._hash_cache is not None:
+            return self._hash_cache
+        rv = self._hash_cache = hash(tuple(self))
+        return rv
+
+    def __reduce_ex__(self, protocol):
+        return type(self), (list(self),)
+
+    def __delitem__(self, key):
+        is_immutable(self)
+
+    def __iadd__(self, other):
+        is_immutable(self)
+
+    def __imul__(self, other):
+        is_immutable(self)
+
+    def __setitem__(self, key, value):
+        is_immutable(self)
+
+    def append(self, item):
+        is_immutable(self)
+
+    def remove(self, item):
+        is_immutable(self)
+
+    def extend(self, iterable):
+        is_immutable(self)
+
+    def insert(self, pos, value):
+        is_immutable(self)
+
+    def pop(self, index=-1):
+        is_immutable(self)
+
+    def reverse(self):
+        is_immutable(self)
+
+    def sort(self, key=None, reverse=False):
+        is_immutable(self)
+
+
+class ImmutableList(ImmutableListMixin, list):
+    """An immutable :class:`list`.
+
+    .. versionadded:: 0.5
+
+    :private:
+    """
+
+    def __repr__(self):
+        return f"{type(self).__name__}({list.__repr__(self)})"
+
+
+class ImmutableDictMixin:
+    """Makes a :class:`dict` immutable.
+
+    .. versionadded:: 0.5
+
+    :private:
+    """
+
+    _hash_cache = None
+
+    @classmethod
+    def fromkeys(cls, keys, value=None):
+        instance = super().__new__(cls)
+        instance.__init__(zip(keys, repeat(value)))
+        return instance
+
+    def __reduce_ex__(self, protocol):
+        return type(self), (dict(self),)
+
+    def _iter_hashitems(self):
+        return self.items()
+
+    def __hash__(self):
+        if self._hash_cache is not None:
+            return self._hash_cache
+        rv = self._hash_cache = hash(frozenset(self._iter_hashitems()))
+        return rv
+
+    def setdefault(self, key, default=None):
+        is_immutable(self)
+
+    def update(self, *args, **kwargs):
+        is_immutable(self)
+
+    def pop(self, key, default=None):
+        is_immutable(self)
+
+    def popitem(self):
+        is_immutable(self)
+
+    def __setitem__(self, key, value):
+        is_immutable(self)
+
+    def __delitem__(self, key):
+        is_immutable(self)
+
+    def clear(self):
+        is_immutable(self)
+
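+# Illustrative behavior (added): every mutating method raises, e.g. for
+# the concrete ImmutableDict defined further below in this module:
+#
+#     >>> ImmutableDict({"a": 1}).pop("a")
+#     Traceback (most recent call last):
+#         ...
+#     TypeError: 'ImmutableDict' objects are immutable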
+
+class ImmutableMultiDictMixin(ImmutableDictMixin):
+    """Makes a :class:`MultiDict` immutable.
+
+    .. versionadded:: 0.5
+
+    :private:
+    """
+
+    def __reduce_ex__(self, protocol):
+        return type(self), (list(self.items(multi=True)),)
+
+    def _iter_hashitems(self):
+        return self.items(multi=True)
+
+    def add(self, key, value):
+        is_immutable(self)
+
+    def popitemlist(self):
+        is_immutable(self)
+
+    def poplist(self, key):
+        is_immutable(self)
+
+    def setlist(self, key, new_list):
+        is_immutable(self)
+
+    def setlistdefault(self, key, default_list=None):
+        is_immutable(self)
+
+
+def _calls_update(name):
+    def oncall(self, *args, **kw):
+        rv = getattr(super(UpdateDictMixin, self), name)(*args, **kw)
+
+        if self.on_update is not None:
+            self.on_update(self)
+
+        return rv
+
+    oncall.__name__ = name
+    return oncall
+
+
+class UpdateDictMixin(dict):
+    """Makes dicts call `self.on_update` on modifications.
+
+    .. versionadded:: 0.5
+
+    :private:
+    """
+
+    on_update = None
+
+    def setdefault(self, key, default=None):
+        modified = key not in self
+        rv = super().setdefault(key, default)
+        if modified and self.on_update is not None:
+            self.on_update(self)
+        return rv
+
+    def pop(self, key, default=_missing):
+        modified = key in self
+        if default is _missing:
+            rv = super().pop(key)
+        else:
+            rv = super().pop(key, default)
+        if modified and self.on_update is not None:
+            self.on_update(self)
+        return rv
+
+    __setitem__ = _calls_update("__setitem__")
+    __delitem__ = _calls_update("__delitem__")
+    clear = _calls_update("clear")
+    popitem = _calls_update("popitem")
+    update = _calls_update("update")
+
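+# Illustrative sketch (added; ``WatchedDict`` is a hypothetical name):
+#
+#     >>> class WatchedDict(UpdateDictMixin):
+#     ...     pass
+#     >>> d = WatchedDict()
+#     >>> d.on_update = lambda self: print("changed")
+#     >>> d["answer"] = 42
+#     changed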
+
+class TypeConversionDict(dict):
+    """Works like a regular dict but the :meth:`get` method can perform
+    type conversions.  :class:`MultiDict` and :class:`CombinedMultiDict`
+    are subclasses of this class and provide the same feature.
+
+    .. versionadded:: 0.5
+    """
+
+    def get(self, key, default=None, type=None):
+        """Return the default value if the requested data doesn't exist.
+        If `type` is provided and is a callable, it should convert the value
+        and return it, or raise a :exc:`ValueError` if that is not possible.
+        In that case the function returns the default as if the value was
+        not found:
+
+        >>> d = TypeConversionDict(foo='42', bar='blub')
+        >>> d.get('foo', type=int)
+        42
+        >>> d.get('bar', -1, type=int)
+        -1
+
+        :param key: The key to be looked up.
+        :param default: The default value to be returned if the key can't
+                        be looked up.  If not further specified `None` is
+                        returned.
+        :param type: A callable that is used to cast the value in the
+                     :class:`MultiDict`.  If a :exc:`ValueError` is raised
+                     by this callable the default value is returned.
+        """
+        try:
+            rv = self[key]
+        except KeyError:
+            return default
+        if type is not None:
+            try:
+                rv = type(rv)
+            except ValueError:
+                rv = default
+        return rv
+
+
+class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict):
+    """Works like a :class:`TypeConversionDict` but does not support
+    modifications.
+
+    .. versionadded:: 0.5
+    """
+
+    def copy(self):
+        """Return a shallow mutable copy of this object.  Keep in mind that
+        the standard library's :func:`copy` function is a no-op for this class
+        like for any other python immutable type (eg: :class:`tuple`).
+        """
+        return TypeConversionDict(self)
+
+    def __copy__(self):
+        return self
+
+
+class MultiDict(TypeConversionDict):
+    """A :class:`MultiDict` is a dictionary subclass customized to deal with
+    multiple values for the same key, which is used for example by the parsing
+    functions in the wrappers.  This is necessary because some HTML form
+    elements pass multiple values for the same key.
+
+    :class:`MultiDict` implements all standard dictionary methods.
+    Internally, it saves all values for a key as a list, but the standard dict
+    access methods will only return the first value for a key. If you want to
+    gain access to the other values, too, you have to use the `list` methods as
+    explained below.
+
+    Basic Usage:
+
+    >>> d = MultiDict([('a', 'b'), ('a', 'c')])
+    >>> d
+    MultiDict([('a', 'b'), ('a', 'c')])
+    >>> d['a']
+    'b'
+    >>> d.getlist('a')
+    ['b', 'c']
+    >>> 'a' in d
+    True
+
+    It behaves like a normal dict; thus, all dict functions will only
+    return the first value when multiple values for one key are found.
+
+    From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
+    subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
+    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP
+    exceptions.
+
+    A :class:`MultiDict` can be constructed from an iterable of
+    ``(key, value)`` tuples, a dict, a :class:`MultiDict` or from Werkzeug 0.2
+    onwards some keyword parameters.
+
+    :param mapping: the initial value for the :class:`MultiDict`.  Either a
+                    regular dict, an iterable of ``(key, value)`` tuples
+                    or `None`.
+    """
+
+    def __init__(self, mapping=None):
+        if isinstance(mapping, MultiDict):
+            dict.__init__(self, ((k, l[:]) for k, l in mapping.lists()))
+        elif isinstance(mapping, dict):
+            tmp = {}
+            for key, value in mapping.items():
+                if isinstance(value, (tuple, list)):
+                    if len(value) == 0:
+                        continue
+                    value = list(value)
+                else:
+                    value = [value]
+                tmp[key] = value
+            dict.__init__(self, tmp)
+        else:
+            tmp = {}
+            for key, value in mapping or ():
+                tmp.setdefault(key, []).append(value)
+            dict.__init__(self, tmp)
+
+    def __getstate__(self):
+        return dict(self.lists())
+
+    def __setstate__(self, value):
+        dict.clear(self)
+        dict.update(self, value)
+
+    def __iter__(self):
+        # Work around https://bugs.python.org/issue43246.
+        # (`return super().__iter__()` also works here, which makes this look
+        # even more like it should be a no-op, yet it isn't.)
+        return dict.__iter__(self)
+
+    def __getitem__(self, key):
+        """Return the first data value for this key;
+        raises KeyError if not found.
+
+        :param key: The key to be looked up.
+        :raise KeyError: if the key does not exist.
+        """
+
+        if key in self:
+            lst = dict.__getitem__(self, key)
+            if len(lst) > 0:
+                return lst[0]
+        raise exceptions.BadRequestKeyError(key)
+
+    def __setitem__(self, key, value):
+        """Like :meth:`add` but removes an existing key first.
+
+        :param key: the key for the value.
+        :param value: the value to set.
+        """
+        dict.__setitem__(self, key, [value])
+
+    def add(self, key, value):
+        """Adds a new value for the key.
+
+        .. versionadded:: 0.6
+
+        :param key: the key for the value.
+        :param value: the value to add.
+        """
+        dict.setdefault(self, key, []).append(value)
+
+    def getlist(self, key, type=None):
+        """Return the list of items for a given key. If that key is not in the
+        `MultiDict`, the return value will be an empty list.  Just like `get`,
+        `getlist` accepts a `type` parameter.  All items will be converted
+        with the callable defined there.
+
+        :param key: The key to be looked up.
+        :param type: A callable that is used to cast the value in the
+                     :class:`MultiDict`.  If a :exc:`ValueError` is raised
+                     by this callable the value will be removed from the list.
+        :return: a :class:`list` of all the values for the key.
+        """
+        try:
+            rv = dict.__getitem__(self, key)
+        except KeyError:
+            return []
+        if type is None:
+            return list(rv)
+        result = []
+        for item in rv:
+            try:
+                result.append(type(item))
+            except ValueError:
+                pass
+        return result
+
+    def setlist(self, key, new_list):
+        """Remove the old values for a key and add new ones.  Note that the list
+        you pass the values in will be shallow-copied before it is inserted in
+        the dictionary.
+
+        >>> d = MultiDict()
+        >>> d.setlist('foo', ['1', '2'])
+        >>> d['foo']
+        '1'
+        >>> d.getlist('foo')
+        ['1', '2']
+
+        :param key: The key for which the values are set.
+        :param new_list: An iterable with the new values for the key.  Old values
+                         are removed first.
+        """
+        dict.__setitem__(self, key, list(new_list))
+
+    def setdefault(self, key, default=None):
+        """Returns the value for the key if it is in the dict, otherwise it
+        returns `default` and sets that value for `key`.
+
+        :param key: The key to be looked up.
+        :param default: The default value to be returned if the key is not
+                        in the dict.  If not further specified it's `None`.
+        """
+        if key not in self:
+            self[key] = default
+        else:
+            default = self[key]
+        return default
+
+    def setlistdefault(self, key, default_list=None):
+        """Like `setdefault` but sets multiple values.  The list returned
+        is not a copy, but the list that is actually used internally.  This
+        means that you can put new values into the dict by appending items
+        to the list:
+
+        >>> d = MultiDict({"foo": 1})
+        >>> d.setlistdefault("foo").extend([2, 3])
+        >>> d.getlist("foo")
+        [1, 2, 3]
+
+        :param key: The key to be looked up.
+        :param default_list: An iterable of default values.  It is either
+                             copied (in case it was a list) or converted
+                             into a list before it is returned.
+        :return: a :class:`list`
+        """
+        if key not in self:
+            default_list = list(default_list or ())
+            dict.__setitem__(self, key, default_list)
+        else:
+            default_list = dict.__getitem__(self, key)
+        return default_list
+
+    def items(self, multi=False):
+        """Return an iterator of ``(key, value)`` pairs.
+
+        :param multi: If set to `True` the iterator returned will have a pair
+                      for each value of each key.  Otherwise it will only
+                      contain pairs for the first value of each key.
+        """
+        for key, values in dict.items(self):
+            if multi:
+                for value in values:
+                    yield key, value
+            else:
+                yield key, values[0]
+
+    def lists(self):
+        """Return a iterator of ``(key, values)`` pairs, where values is the list
+        of all values associated with the key."""
+        for key, values in dict.items(self):
+            yield key, list(values)
+
+    def values(self):
+        """Returns an iterator of the first value on every key's value list."""
+        for values in dict.values(self):
+            yield values[0]
+
+    def listvalues(self):
+        """Return an iterator of all values associated with a key.  Zipping
+        :meth:`keys` and this is the same as calling :meth:`lists`:
+
+        >>> d = MultiDict({"foo": [1, 2, 3]})
+        >>> list(zip(d.keys(), d.listvalues())) == list(d.lists())
+        True
+        """
+        return dict.values(self)
+
+    def copy(self):
+        """Return a shallow copy of this object."""
+        return self.__class__(self)
+
+    def deepcopy(self, memo=None):
+        """Return a deep copy of this object."""
+        return self.__class__(deepcopy(self.to_dict(flat=False), memo))
+
+    def to_dict(self, flat=True):
+        """Return the contents as regular dict.  If `flat` is `True` the
+        returned dict will only have the first item present, if `flat` is
+        `False` all values will be returned as lists.
+
+        :param flat: If set to `False` the dict returned will have lists
+                     with all the values in it.  Otherwise it will only
+                     contain the first value for each key.
+        :return: a :class:`dict`
+        """
+        if flat:
+            return dict(self.items())
+        return dict(self.lists())
+
+    def update(self, mapping):
+        """update() extends rather than replaces existing key lists:
+
+        >>> a = MultiDict({'x': 1})
+        >>> b = MultiDict({'x': 2, 'y': 3})
+        >>> a.update(b)
+        >>> a
+        MultiDict([('y', 3), ('x', 1), ('x', 2)])
+
+        If the value list for a key in ``mapping`` is empty, no new values
+        will be added to the dict and the key will not be created:
+
+        >>> x = {'empty_list': []}
+        >>> y = MultiDict()
+        >>> y.update(x)
+        >>> y
+        MultiDict([])
+        """
+        for key, value in iter_multi_items(mapping):
+            MultiDict.add(self, key, value)
+
+    def pop(self, key, default=_missing):
+        """Pop the first item for a list on the dict.  Afterwards the
+        key is removed from the dict, so additional values are discarded:
+
+        >>> d = MultiDict({"foo": [1, 2, 3]})
+        >>> d.pop("foo")
+        1
+        >>> "foo" in d
+        False
+
+        :param key: the key to pop.
+        :param default: if provided the value to return if the key was
+                        not in the dictionary.
+        """
+        try:
+            lst = dict.pop(self, key)
+
+            if len(lst) == 0:
+                raise exceptions.BadRequestKeyError(key)
+
+            return lst[0]
+        except KeyError:
+            if default is not _missing:
+                return default
+            raise exceptions.BadRequestKeyError(key)
+
+    def popitem(self):
+        """Pop an item from the dict."""
+        try:
+            item = dict.popitem(self)
+
+            if len(item[1]) == 0:
+                raise exceptions.BadRequestKeyError(item)
+
+            return (item[0], item[1][0])
+        except KeyError as e:
+            raise exceptions.BadRequestKeyError(e.args[0])
+
+    def poplist(self, key):
+        """Pop the list for a key from the dict.  If the key is not in the dict
+        an empty list is returned.
+
+        .. versionchanged:: 0.5
+           If the key no longer exists, an empty list is returned instead
+           of raising an error.
+        """
+        return dict.pop(self, key, [])
+
+    def popitemlist(self):
+        """Pop a ``(key, list)`` tuple from the dict."""
+        try:
+            return dict.popitem(self)
+        except KeyError as e:
+            raise exceptions.BadRequestKeyError(e.args[0])
+
+    def __copy__(self):
+        return self.copy()
+
+    def __deepcopy__(self, memo):
+        return self.deepcopy(memo=memo)
+
+    def __repr__(self):
+        return f"{type(self).__name__}({list(self.items(multi=True))!r})"
+
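+# Illustrative example (added) of the flat and non-flat views:
+#
+#     >>> d = MultiDict([("a", "1"), ("a", "2"), ("b", "3")])
+#     >>> d.to_dict()
+#     {'a': '1', 'b': '3'}
+#     >>> d.to_dict(flat=False)
+#     {'a': ['1', '2'], 'b': ['3']}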
+
+class _omd_bucket:
+    """Wraps values in the :class:`OrderedMultiDict`.  This makes it
+    possible to keep an order over multiple different keys.  It requires
+    a lot of extra memory and slows down access a lot, but makes it
+    possible to access elements in O(1) and iterate in O(n).
+    """
+
+    __slots__ = ("prev", "key", "value", "next")
+
+    def __init__(self, omd, key, value):
+        self.prev = omd._last_bucket
+        self.key = key
+        self.value = value
+        self.next = None
+
+        if omd._first_bucket is None:
+            omd._first_bucket = self
+        if omd._last_bucket is not None:
+            omd._last_bucket.next = self
+        omd._last_bucket = self
+
+    def unlink(self, omd):
+        if self.prev:
+            self.prev.next = self.next
+        if self.next:
+            self.next.prev = self.prev
+        if omd._first_bucket is self:
+            omd._first_bucket = self.next
+        if omd._last_bucket is self:
+            omd._last_bucket = self.prev
+
+
+class OrderedMultiDict(MultiDict):
+    """Works like a regular :class:`MultiDict` but preserves the
+    order of the fields.  To convert the ordered multi dict into a
+    list you can use the :meth:`items` method and pass it ``multi=True``.
+
+    In general an :class:`OrderedMultiDict` is an order of magnitude
+    slower than a :class:`MultiDict`.
+
+    .. admonition:: note
+
+       Due to a limitation in Python you cannot convert an ordered
+       multi dict into a regular dict by using ``dict(multidict)``.
+       Instead you have to use the :meth:`to_dict` method, otherwise
+       the internal bucket objects are exposed.
+    """
+
+    def __init__(self, mapping=None):
+        dict.__init__(self)
+        self._first_bucket = self._last_bucket = None
+        if mapping is not None:
+            OrderedMultiDict.update(self, mapping)
+
+    def __eq__(self, other):
+        if not isinstance(other, MultiDict):
+            return NotImplemented
+        if isinstance(other, OrderedMultiDict):
+            iter1 = iter(self.items(multi=True))
+            iter2 = iter(other.items(multi=True))
+            try:
+                for k1, v1 in iter1:
+                    k2, v2 = next(iter2)
+                    if k1 != k2 or v1 != v2:
+                        return False
+            except StopIteration:
+                return False
+            try:
+                next(iter2)
+            except StopIteration:
+                return True
+            return False
+        if len(self) != len(other):
+            return False
+        for key, values in self.lists():
+            if other.getlist(key) != values:
+                return False
+        return True
+
+    __hash__ = None
+
+    def __reduce_ex__(self, protocol):
+        return type(self), (list(self.items(multi=True)),)
+
+    def __getstate__(self):
+        return list(self.items(multi=True))
+
+    def __setstate__(self, values):
+        dict.clear(self)
+        for key, value in values:
+            self.add(key, value)
+
+    def __getitem__(self, key):
+        if key in self:
+            return dict.__getitem__(self, key)[0].value
+        raise exceptions.BadRequestKeyError(key)
+
+    def __setitem__(self, key, value):
+        self.poplist(key)
+        self.add(key, value)
+
+    def __delitem__(self, key):
+        self.pop(key)
+
+    def keys(self):
+        return (key for key, value in self.items())
+
+    def __iter__(self):
+        return iter(self.keys())
+
+    def values(self):
+        return (value for key, value in self.items())
+
+    def items(self, multi=False):
+        ptr = self._first_bucket
+        if multi:
+            while ptr is not None:
+                yield ptr.key, ptr.value
+                ptr = ptr.next
+        else:
+            returned_keys = set()
+            while ptr is not None:
+                if ptr.key not in returned_keys:
+                    returned_keys.add(ptr.key)
+                    yield ptr.key, ptr.value
+                ptr = ptr.next
+
+    def lists(self):
+        returned_keys = set()
+        ptr = self._first_bucket
+        while ptr is not None:
+            if ptr.key not in returned_keys:
+                yield ptr.key, self.getlist(ptr.key)
+                returned_keys.add(ptr.key)
+            ptr = ptr.next
+
+    def listvalues(self):
+        for _key, values in self.lists():
+            yield values
+
+    def add(self, key, value):
+        dict.setdefault(self, key, []).append(_omd_bucket(self, key, value))
+
+    def getlist(self, key, type=None):
+        try:
+            rv = dict.__getitem__(self, key)
+        except KeyError:
+            return []
+        if type is None:
+            return [x.value for x in rv]
+        result = []
+        for item in rv:
+            try:
+                result.append(type(item.value))
+            except ValueError:
+                pass
+        return result
+
+    def setlist(self, key, new_list):
+        self.poplist(key)
+        for value in new_list:
+            self.add(key, value)
+
+    def setlistdefault(self, key, default_list=None):
+        raise TypeError("setlistdefault is unsupported for ordered multi dicts")
+
+    def update(self, mapping):
+        for key, value in iter_multi_items(mapping):
+            OrderedMultiDict.add(self, key, value)
+
+    def poplist(self, key):
+        buckets = dict.pop(self, key, ())
+        for bucket in buckets:
+            bucket.unlink(self)
+        return [x.value for x in buckets]
+
+    def pop(self, key, default=_missing):
+        try:
+            buckets = dict.pop(self, key)
+        except KeyError:
+            if default is not _missing:
+                return default
+            raise exceptions.BadRequestKeyError(key)
+        for bucket in buckets:
+            bucket.unlink(self)
+        return buckets[0].value
+
+    def popitem(self):
+        try:
+            key, buckets = dict.popitem(self)
+        except KeyError as e:
+            raise exceptions.BadRequestKeyError(e.args[0])
+        for bucket in buckets:
+            bucket.unlink(self)
+        return key, buckets[0].value
+
+    def popitemlist(self):
+        try:
+            key, buckets = dict.popitem(self)
+        except KeyError as e:
+            raise exceptions.BadRequestKeyError(e.args[0])
+        for bucket in buckets:
+            bucket.unlink(self)
+        return key, [x.value for x in buckets]
+
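+# Illustrative example (added): insertion order is preserved across
+# different keys, unlike in a plain MultiDict:
+#
+#     >>> d = OrderedMultiDict()
+#     >>> d.add("a", 1); d.add("b", 2); d.add("a", 3)
+#     >>> list(d.items(multi=True))
+#     [('a', 1), ('b', 2), ('a', 3)]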
+
+def _options_header_vkw(value, kw):
+    return http.dump_options_header(
+        value, {k.replace("_", "-"): v for k, v in kw.items()}
+    )
+
+
+def _unicodify_header_value(value):
+    if isinstance(value, bytes):
+        value = value.decode("latin-1")
+    if not isinstance(value, str):
+        value = str(value)
+    return value
+
+
+class Headers:
+    """An object that stores some headers. It has a dict-like interface,
+    but is ordered, can store the same key multiple times, and iterating
+    yields ``(key, value)`` pairs instead of only keys.
+
+    This data structure is useful if you want a nicer way to handle WSGI
+    headers which are stored as tuples in a list.
+
+    From Werkzeug 0.3 onwards, the :exc:`KeyError` raised by this class is
+    also a subclass of the :class:`~exceptions.BadRequest` HTTP exception
+    and will render a page for a ``400 BAD REQUEST`` if caught in a
+    catch-all for HTTP exceptions.
+
+    Headers is mostly compatible with the Python :class:`wsgiref.headers.Headers`
+    class, with the exception of `__getitem__`.  :mod:`wsgiref` will return
+    `None` for ``headers['missing']``, whereas :class:`Headers` will raise
+    a :class:`KeyError`.
+
+    To create a new :class:`Headers` object pass it a list or dict of headers
+    which are used as default values.  This does not reuse the list passed
+    to the constructor for internal usage.
+
+    :param defaults: The list of default values for the :class:`Headers`.
+
+    .. versionchanged:: 0.9
+       This data structure now stores unicode values similar to how the
+       multi dicts do it.  The main difference is that bytes can be set as
+       well which will automatically be latin1 decoded.
+
+    .. versionchanged:: 0.9
+       The :meth:`linked` function was removed without replacement as it
+       was an API that does not support the changes to the encoding model.
+    """
+
+    def __init__(self, defaults=None):
+        self._list = []
+        if defaults is not None:
+            if isinstance(defaults, (list, Headers)):
+                self._list.extend(defaults)
+            else:
+                self.extend(defaults)
+
+    def __getitem__(self, key, _get_mode=False):
+        if not _get_mode:
+            if isinstance(key, int):
+                return self._list[key]
+            elif isinstance(key, slice):
+                return self.__class__(self._list[key])
+        if not isinstance(key, str):
+            raise exceptions.BadRequestKeyError(key)
+        ikey = key.lower()
+        for k, v in self._list:
+            if k.lower() == ikey:
+                return v
+        # micro optimization: if we are in get mode we will catch that
+        # exception one stack level down so we can raise a standard
+        # key error instead of our special one.
+        if _get_mode:
+            raise KeyError()
+        raise exceptions.BadRequestKeyError(key)
+
+    def __eq__(self, other):
+        def lowered(item):
+            return (item[0].lower(),) + item[1:]
+
+        return other.__class__ is self.__class__ and set(
+            map(lowered, other._list)
+        ) == set(map(lowered, self._list))
+
+    __hash__ = None
+
+    def get(self, key, default=None, type=None, as_bytes=False):
+        """Return the default value if the requested data doesn't exist.
+        If `type` is provided and is a callable, it should convert the value
+        and return it, or raise a :exc:`ValueError` if that is not possible.
+        In that case the function returns the default as if the value was
+        not found:
+
+        >>> d = Headers([('Content-Length', '42')])
+        >>> d.get('Content-Length', type=int)
+        42
+
+        .. versionadded:: 0.9
+           Added support for `as_bytes`.
+
+        :param key: The key to be looked up.
+        :param default: The default value to be returned if the key can't
+                        be looked up.  If not further specified `None` is
+                        returned.
+        :param type: A callable that is used to cast the value in the
+                     :class:`Headers`.  If a :exc:`ValueError` is raised
+                     by this callable the default value is returned.
+        :param as_bytes: return bytes instead of strings.
+        """
+        try:
+            rv = self.__getitem__(key, _get_mode=True)
+        except KeyError:
+            return default
+        if as_bytes:
+            rv = rv.encode("latin1")
+        if type is None:
+            return rv
+        try:
+            return type(rv)
+        except ValueError:
+            return default
+
+    def getlist(self, key, type=None, as_bytes=False):
+        """Return the list of items for a given key. If that key is not in the
+        :class:`Headers`, the return value will be an empty list.  Just like
+        :meth:`get`, :meth:`getlist` accepts a `type` parameter.  All items will
+        be converted with the callable defined there.
+
+        .. versionadded:: 0.9
+           Added support for `as_bytes`.
+
+        :param key: The key to be looked up.
+        :param type: A callable that is used to cast the value in the
+                     :class:`Headers`.  If a :exc:`ValueError` is raised
+                     by this callable the value will be removed from the list.
+        :param as_bytes: return bytes instead of strings.
+        :return: a :class:`list` of all the values for the key.
+        """
+        ikey = key.lower()
+        result = []
+        for k, v in self:
+            if k.lower() == ikey:
+                if as_bytes:
+                    v = v.encode("latin1")
+                if type is not None:
+                    try:
+                        v = type(v)
+                    except ValueError:
+                        continue
+                result.append(v)
+        return result
+
+    def get_all(self, name):
+        """Return a list of all the values for the named field.
+
+        This method is compatible with the :mod:`wsgiref`
+        :meth:`~wsgiref.headers.Headers.get_all` method.
+        """
+        return self.getlist(name)
+
+    def items(self, lower=False):
+        for key, value in self:
+            if lower:
+                key = key.lower()
+            yield key, value
+
+    def keys(self, lower=False):
+        for key, _ in self.items(lower):
+            yield key
+
+    def values(self):
+        for _, value in self.items():
+            yield value
+
+    def extend(self, *args, **kwargs):
+        """Extend headers in this object with items from another object
+        containing header items as well as keyword arguments.
+
+        To replace existing keys instead of extending, use
+        :meth:`update` instead.
+
+        If provided, the first argument can be another :class:`Headers`
+        object, a :class:`MultiDict`, :class:`dict`, or iterable of
+        pairs.
+
+        .. versionchanged:: 1.0
+            Support :class:`MultiDict`. Allow passing ``kwargs``.
+        """
+        if len(args) > 1:
+            raise TypeError(f"update expected at most 1 arguments, got {len(args)}")
+
+        if args:
+            for key, value in iter_multi_items(args[0]):
+                self.add(key, value)
+
+        for key, value in iter_multi_items(kwargs):
+            self.add(key, value)
+
+    def __delitem__(self, key, _index_operation=True):
+        if _index_operation and isinstance(key, (int, slice)):
+            del self._list[key]
+            return
+        key = key.lower()
+        new = []
+        for k, v in self._list:
+            if k.lower() != key:
+                new.append((k, v))
+        self._list[:] = new
+
+    def remove(self, key):
+        """Remove a key.
+
+        :param key: The key to be removed.
+        """
+        return self.__delitem__(key, _index_operation=False)
+
+    def pop(self, key=None, default=_missing):
+        """Removes and returns a key or index.
+
+        :param key: The key to be popped.  If this is an integer the item at
+                    that position is removed, if it's a string the value for
+                    that key is.  If the key is omitted or `None` the last
+                    item is removed.
+        :return: an item.
+        """
+        if key is None:
+            return self._list.pop()
+        if isinstance(key, int):
+            return self._list.pop(key)
+        try:
+            rv = self[key]
+            self.remove(key)
+        except KeyError:
+            if default is not _missing:
+                return default
+            raise
+        return rv
+
+    def popitem(self):
+        """Removes a key or index and returns a (key, value) item."""
+        return self.pop()
+
+    def __contains__(self, key):
+        """Check if a key is present."""
+        try:
+            self.__getitem__(key, _get_mode=True)
+        except KeyError:
+            return False
+        return True
+
+    def has_key(self, key):
+        """
+        .. deprecated:: 2.0
+            Will be removed in Werkzeug 2.1. Use ``key in data``
+            instead.
+        """
+        warnings.warn(
+            "'has_key' is deprecated and will be removed in Werkzeug"
+            " 2.1. Use 'key in data' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return key in self
+
+    def __iter__(self):
+        """Yield ``(key, value)`` tuples."""
+        return iter(self._list)
+
+    def __len__(self):
+        return len(self._list)
+
+    def add(self, _key, _value, **kw):
+        """Add a new header tuple to the list.
+
+        Keyword arguments can specify additional parameters for the header
+        value, with underscores converted to dashes:
+
+        >>> d = Headers()
+        >>> d.add('Content-Type', 'text/plain')
+        >>> d.add('Content-Disposition', 'attachment', filename='foo.png')
+
+        The keyword argument dumping uses :func:`dump_options_header`
+        behind the scenes.
+
+        .. versionadded:: 0.4.1
+            Keyword arguments were added for :mod:`wsgiref` compatibility.
+        """
+        if kw:
+            _value = _options_header_vkw(_value, kw)
+        _key = _unicodify_header_value(_key)
+        _value = _unicodify_header_value(_value)
+        self._validate_value(_value)
+        self._list.append((_key, _value))
+
+    def _validate_value(self, value):
+        if not isinstance(value, str):
+            raise TypeError("Value should be a string.")
+        if "\n" in value or "\r" in value:
+            raise ValueError(
+                "Detected newline in header value.  This is "
+                "a potential security problem"
+            )
+
+    def add_header(self, _key, _value, **_kw):
+        """Add a new header tuple to the list.
+
+        An alias for :meth:`add` for compatibility with the :mod:`wsgiref`
+        :meth:`~wsgiref.headers.Headers.add_header` method.
+        """
+        self.add(_key, _value, **_kw)
+
+    def clear(self):
+        """Clears all headers."""
+        del self._list[:]
+
+    def set(self, _key, _value, **kw):
+        """Remove all header tuples for `key` and add a new one.  The newly
+        added key either appears at the end of the list if there was no
+        entry or replaces the first one.
+
+        Keyword arguments can specify additional parameters for the header
+        value, with underscores converted to dashes.  See :meth:`add` for
+        more information.
+
+        .. versionchanged:: 0.6.1
+           :meth:`set` now accepts the same arguments as :meth:`add`.
+
+        :param key: The key to be inserted.
+        :param value: The value to be inserted.
+        """
+        if kw:
+            _value = _options_header_vkw(_value, kw)
+        _key = _unicodify_header_value(_key)
+        _value = _unicodify_header_value(_value)
+        self._validate_value(_value)
+        if not self._list:
+            self._list.append((_key, _value))
+            return
+        listiter = iter(self._list)
+        ikey = _key.lower()
+        for idx, (old_key, _old_value) in enumerate(listiter):
+            if old_key.lower() == ikey:
+                # replace first occurrence
+                self._list[idx] = (_key, _value)
+                break
+        else:
+            self._list.append((_key, _value))
+            return
+        self._list[idx + 1 :] = [t for t in listiter if t[0].lower() != ikey]
+
+    def setlist(self, key, values):
+        """Remove any existing values for a header and add new ones.
+
+        :param key: The header key to set.
+        :param values: An iterable of values to set for the key.
+
+        .. versionadded:: 1.0
+        """
+        if values:
+            values_iter = iter(values)
+            self.set(key, next(values_iter))
+
+            for value in values_iter:
+                self.add(key, value)
+        else:
+            self.remove(key)
+
+    def setdefault(self, key, default):
+        """Return the first value for the key if it is in the headers,
+        otherwise set the header to the value given by ``default`` and
+        return that.
+
+        :param key: The header key to get.
+        :param default: The value to set for the key if it is not in the
+            headers.
+        """
+        if key in self:
+            return self[key]
+
+        self.set(key, default)
+        return default
+
+    def setlistdefault(self, key, default):
+        """Return the list of values for the key if it is in the
+        headers, otherwise set the header to the list of values given
+        by ``default`` and return that.
+
+        Unlike :meth:`MultiDict.setlistdefault`, modifying the returned
+        list will not affect the headers.
+
+        :param key: The header key to get.
+        :param default: An iterable of values to set for the key if it
+            is not in the headers.
+
+        .. versionadded:: 1.0
+        """
+        if key not in self:
+            self.setlist(key, default)
+
+        return self.getlist(key)
+
+    def __setitem__(self, key, value):
+        """Like :meth:`set` but also supports index/slice based setting."""
+        if isinstance(key, (slice, int)):
+            if isinstance(key, int):
+                value = [value]
+            value = [
+                (_unicodify_header_value(k), _unicodify_header_value(v))
+                for (k, v) in value
+            ]
+            for (_, v) in value:
+                self._validate_value(v)
+            if isinstance(key, int):
+                self._list[key] = value[0]
+            else:
+                self._list[key] = value
+        else:
+            self.set(key, value)
+
+    def update(self, *args, **kwargs):
+        """Replace headers in this object with items from another
+        headers object and keyword arguments.
+
+        To extend existing keys instead of replacing, use :meth:`extend`
+        instead.
+
+        If provided, the first argument can be another :class:`Headers`
+        object, a :class:`MultiDict`, :class:`dict`, or iterable of
+        pairs.
+
+        .. versionadded:: 1.0
+        """
+        if len(args) > 1:
+            raise TypeError(f"update expected at most 1 arguments, got {len(args)}")
+
+        if args:
+            mapping = args[0]
+
+            if isinstance(mapping, (Headers, MultiDict)):
+                for key in mapping.keys():
+                    self.setlist(key, mapping.getlist(key))
+            elif isinstance(mapping, dict):
+                for key, value in mapping.items():
+                    if isinstance(value, (list, tuple)):
+                        self.setlist(key, value)
+                    else:
+                        self.set(key, value)
+            else:
+                for key, value in mapping:
+                    self.set(key, value)
+
+        for key, value in kwargs.items():
+            if isinstance(value, (list, tuple)):
+                self.setlist(key, value)
+            else:
+                self.set(key, value)
+
+    def to_wsgi_list(self):
+        """Convert the headers into a list suitable for WSGI.
+
+        :return: list
+        """
+        return list(self)
+
+    def copy(self):
+        return self.__class__(self._list)
+
+    def __copy__(self):
+        return self.copy()
+
+    def __str__(self):
+        """Returns formatted headers suitable for HTTP transmission."""
+        strs = []
+        for key, value in self.to_wsgi_list():
+            strs.append(f"{key}: {value}")
+        strs.append("\r\n")
+        return "\r\n".join(strs)
+
+    def __repr__(self):
+        return f"{type(self).__name__}({list(self)!r})"
+
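+# Illustrative example (added; the header name is made up): ``add``
+# appends a duplicate header, ``set`` replaces all occurrences of a key:
+#
+#     >>> h = Headers()
+#     >>> h.add("X-Tag", "a"); h.add("X-Tag", "b")
+#     >>> h.getlist("X-Tag")
+#     ['a', 'b']
+#     >>> h.set("X-Tag", "c")
+#     >>> h.getlist("X-Tag")
+#     ['c']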
+
+class ImmutableHeadersMixin:
+    """Makes a :class:`Headers` immutable.  We do not mark them as
+    hashable though since the only usecase for this datastructure
+    in Werkzeug is a view on a mutable structure.
+
+    .. versionadded:: 0.5
+
+    :private:
+    """
+
+    def __delitem__(self, key, **kwargs):
+        is_immutable(self)
+
+    def __setitem__(self, key, value):
+        is_immutable(self)
+
+    def set(self, _key, _value, **kw):
+        is_immutable(self)
+
+    def setlist(self, key, values):
+        is_immutable(self)
+
+    def add(self, _key, _value, **kw):
+        is_immutable(self)
+
+    def add_header(self, _key, _value, **_kw):
+        is_immutable(self)
+
+    def remove(self, key):
+        is_immutable(self)
+
+    def extend(self, *args, **kwargs):
+        is_immutable(self)
+
+    def update(self, *args, **kwargs):
+        is_immutable(self)
+
+    def insert(self, pos, value):
+        is_immutable(self)
+
+    def pop(self, key=None, default=_missing):
+        is_immutable(self)
+
+    def popitem(self):
+        is_immutable(self)
+
+    def setdefault(self, key, default):
+        is_immutable(self)
+
+    def setlistdefault(self, key, default):
+        is_immutable(self)
+
+
+class EnvironHeaders(ImmutableHeadersMixin, Headers):
+    """Read only version of the headers from a WSGI environment.  This
+    provides the same interface as `Headers` and is constructed from
+    a WSGI environment.
+
+    From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
+    subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
+    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for
+    HTTP exceptions.
+    """
+
+    def __init__(self, environ):
+        self.environ = environ
+
+    def __eq__(self, other):
+        return self.environ is other.environ
+
+    __hash__ = None
+
+    def __getitem__(self, key, _get_mode=False):
+        # _get_mode is a no-op for this class as there is no index but
+        # used because get() calls it.
+        if not isinstance(key, str):
+            raise KeyError(key)
+        key = key.upper().replace("-", "_")
+        if key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
+            return _unicodify_header_value(self.environ[key])
+        return _unicodify_header_value(self.environ[f"HTTP_{key}"])
+
+    def __len__(self):
+        # the iter is necessary because otherwise list calls our
+        # len which would call list again and so forth.
+        return len(list(iter(self)))
+
+    def __iter__(self):
+        for key, value in self.environ.items():
+            if key.startswith("HTTP_") and key not in (
+                "HTTP_CONTENT_TYPE",
+                "HTTP_CONTENT_LENGTH",
+            ):
+                yield (
+                    key[5:].replace("_", "-").title(),
+                    _unicodify_header_value(value),
+                )
+            elif key in ("CONTENT_TYPE", "CONTENT_LENGTH") and value:
+                yield (key.replace("_", "-").title(), _unicodify_header_value(value))
+
+    def copy(self):
+        raise TypeError(f"cannot create {type(self).__name__!r} copies")
+
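+# Illustrative example (added; the environ dict is a made-up minimal
+# WSGI environment):
+#
+#     >>> env = {"CONTENT_TYPE": "text/html", "HTTP_X_TOKEN": "abc"}
+#     >>> h = EnvironHeaders(env)
+#     >>> h["X-Token"]
+#     'abc'
+#     >>> sorted(h.keys())
+#     ['Content-Type', 'X-Token']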
+
+class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict):
+    """A read only :class:`MultiDict` that you can pass multiple :class:`MultiDict`
+    instances as sequence and it will combine the return values of all wrapped
+    dicts:
+
+    >>> from werkzeug.datastructures import CombinedMultiDict, MultiDict
+    >>> post = MultiDict([('foo', 'bar')])
+    >>> get = MultiDict([('blub', 'blah')])
+    >>> combined = CombinedMultiDict([get, post])
+    >>> combined['foo']
+    'bar'
+    >>> combined['blub']
+    'blah'
+
+    This works for all read operations and will raise a `TypeError` for
+    methods that usually change data, since that isn't possible here.
+
+    From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
+    subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
+    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP
+    exceptions.
+    """
+
+    def __reduce_ex__(self, protocol):
+        return type(self), (self.dicts,)
+
+    def __init__(self, dicts=None):
+        self.dicts = list(dicts) if dicts is not None else []
+
+    @classmethod
+    def fromkeys(cls, keys, value=None):
+        raise TypeError(f"cannot create {cls.__name__!r} instances by fromkeys")
+
+    def __getitem__(self, key):
+        for d in self.dicts:
+            if key in d:
+                return d[key]
+        raise exceptions.BadRequestKeyError(key)
+
+    def get(self, key, default=None, type=None):
+        for d in self.dicts:
+            if key in d:
+                if type is not None:
+                    try:
+                        return type(d[key])
+                    except ValueError:
+                        continue
+                return d[key]
+        return default
+
+    def getlist(self, key, type=None):
+        rv = []
+        for d in self.dicts:
+            rv.extend(d.getlist(key, type))
+        return rv
+
+    def _keys_impl(self):
+        """This function exists so __len__ can be implemented more efficiently,
+        saving one list creation from an iterator.
+        """
+        rv = set()
+        rv.update(*self.dicts)
+        return rv
+
+    def keys(self):
+        return self._keys_impl()
+
+    def __iter__(self):
+        return iter(self.keys())
+
+    def items(self, multi=False):
+        found = set()
+        for d in self.dicts:
+            for key, value in d.items(multi):
+                if multi:
+                    yield key, value
+                elif key not in found:
+                    found.add(key)
+                    yield key, value
+
+    def values(self):
+        for _key, value in self.items():
+            yield value
+
+    def lists(self):
+        rv = {}
+        for d in self.dicts:
+            for key, values in d.lists():
+                rv.setdefault(key, []).extend(values)
+        return list(rv.items())
+
+    def listvalues(self):
+        return (x[1] for x in self.lists())
+
+    def copy(self):
+        """Return a shallow mutable copy of this object.
+
+        This returns a :class:`MultiDict` representing the data at the
+        time of copying. The copy will no longer reflect changes to the
+        wrapped dicts.
+
+        .. versionchanged:: 0.15
+            Return a mutable :class:`MultiDict`.
+        """
+        return MultiDict(self)
+
+    def to_dict(self, flat=True):
+        """Return the contents as regular dict.  If `flat` is `True` the
+        returned dict will only have the first item present, if `flat` is
+        `False` all values will be returned as lists.
+
+        :param flat: If set to `False` the dict returned will have lists
+                     with all the values in it.  Otherwise it will only
+                     contain the first item for each key.
+        :return: a :class:`dict`
+        """
+        rv = {}
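+        # Added note: iterating in reverse makes earlier dicts overwrite
+        # later ones, matching __getitem__'s first-dict-wins lookup.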
+        for d in reversed(self.dicts):
+            rv.update(d.to_dict(flat))
+        return rv
+
+    def __len__(self):
+        return len(self._keys_impl())
+
+    def __contains__(self, key):
+        for d in self.dicts:
+            if key in d:
+                return True
+        return False
+
+    def has_key(self, key):
+        """
+        .. deprecated:: 2.0
+            Will be removed in Werkzeug 2.1. Use ``key in data``
+            instead.
+        """
+        warnings.warn(
+            "'has_key' is deprecated and will be removed in Werkzeug"
+            " 2.1. Use 'key in data' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return key in self
+
+    def __repr__(self):
+        return f"{type(self).__name__}({self.dicts!r})"
+
+
+class FileMultiDict(MultiDict):
+    """A special :class:`MultiDict` that has convenience methods to add
+    files to it.  This is used for :class:`EnvironBuilder` and generally
+    useful for unit testing.
+
+    .. versionadded:: 0.5
+    """
+
+    def add_file(self, name, file, filename=None, content_type=None):
+        """Adds a new file to the dict.  `file` can be a file name or
+        a :class:`file`-like or a :class:`FileStorage` object.
+
+        :param name: the name of the field.
+        :param file: a filename or :class:`file`-like object
+        :param filename: an optional filename
+        :param content_type: an optional content type
+        """
+        if isinstance(file, FileStorage):
+            value = file
+        else:
+            if isinstance(file, str):
+                if filename is None:
+                    filename = file
+                file = open(file, "rb")
+            if filename and content_type is None:
+                content_type = (
+                    mimetypes.guess_type(filename)[0] or "application/octet-stream"
+                )
+            value = FileStorage(file, filename, name, content_type)
+
+        self.add(name, value)
+
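+# Illustrative example (added; field and file names are made up):
+#
+#     >>> from io import BytesIO
+#     >>> d = FileMultiDict()
+#     >>> d.add_file("avatar", BytesIO(b"data"), filename="avatar.png")
+#     >>> d["avatar"].filename
+#     'avatar.png'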
+
+class ImmutableDict(ImmutableDictMixin, dict):
+    """An immutable :class:`dict`.
+
+    .. versionadded:: 0.5
+    """
+
+    def __repr__(self):
+        return f"{type(self).__name__}({dict.__repr__(self)})"
+
+    def copy(self):
+        """Return a shallow mutable copy of this object.  Keep in mind that
+        the standard library's :func:`copy` function is a no-op for this class
+        like for any other python immutable type (eg: :class:`tuple`).
+        """
+        return dict(self)
+
+    def __copy__(self):
+        return self
+
+
+class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict):
+    """An immutable :class:`MultiDict`.
+
+    .. versionadded:: 0.5
+    """
+
+    def copy(self):
+        """Return a shallow mutable copy of this object.  Keep in mind that
+        the standard library's :func:`copy` function is a no-op for this class
+        like for any other python immutable type (eg: :class:`tuple`).
+        """
+        return MultiDict(self)
+
+    def __copy__(self):
+        return self
+
+
+class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict):
+    """An immutable :class:`OrderedMultiDict`.
+
+    .. versionadded:: 0.6
+    """
+
+    def _iter_hashitems(self):
+        return enumerate(self.items(multi=True))
+
+    def copy(self):
+        """Return a shallow mutable copy of this object.  Keep in mind that
+        the standard library's :func:`copy` function is a no-op for this class
+        like for any other python immutable type (eg: :class:`tuple`).
+        """
+        return OrderedMultiDict(self)
+
+    def __copy__(self):
+        return self
+
+
+class Accept(ImmutableList):
+    """An :class:`Accept` object is just a list subclass for lists of
+    ``(value, quality)`` tuples.  It is automatically sorted by specificity
+    and quality.
+
+    All :class:`Accept` objects work similar to a list but provide extra
+    functionality for working with the data.  Containment checks are
+    normalized to the rules of that header:
+
+    >>> a = CharsetAccept([('ISO-8859-1', 1), ('utf-8', 0.7)])
+    >>> a.best
+    'ISO-8859-1'
+    >>> 'iso-8859-1' in a
+    True
+    >>> 'UTF8' in a
+    True
+    >>> 'utf7' in a
+    False
+
+    To get the quality for an item you can use normal item lookup:
+
+    >>> print(a['utf-8'])
+    0.7
+    >>> a['utf7']
+    0
+
+    .. versionchanged:: 0.5
+       :class:`Accept` objects are forced immutable now.
+
+    .. versionchanged:: 1.0.0
+       :class:`Accept` internal values are no longer ordered
+       alphabetically for equal quality tags. Instead the initial
+       order is preserved.
+
+    """
+
+    def __init__(self, values=()):
+        if values is None:
+            list.__init__(self)
+            self.provided = False
+        elif isinstance(values, Accept):
+            self.provided = values.provided
+            list.__init__(self, values)
+        else:
+            self.provided = True
+            values = sorted(
+                values, key=lambda x: (self._specificity(x[0]), x[1]), reverse=True
+            )
+            list.__init__(self, values)
+
+    def _specificity(self, value):
+        """Returns a tuple describing the value's specificity."""
+        return (value != "*",)
+
+    def _value_matches(self, value, item):
+        """Check if a value matches a given accept item."""
+        return item == "*" or item.lower() == value.lower()
+
+    def __getitem__(self, key):
+        """Besides index lookup (getting item n) you can also pass it a string
+        to get the quality for the item.  If the item is not in the list, the
+        returned quality is ``0``.
+        """
+        if isinstance(key, str):
+            return self.quality(key)
+        return list.__getitem__(self, key)
+
+    def quality(self, key):
+        """Returns the quality of the key.
+
+        .. versionadded:: 0.6
+           In previous versions you had to use the item-lookup syntax
+           (eg: ``obj[key]`` instead of ``obj.quality(key)``)
+        """
+        for item, quality in self:
+            if self._value_matches(key, item):
+                return quality
+        return 0
+
+    def __contains__(self, value):
+        for item, _quality in self:
+            if self._value_matches(value, item):
+                return True
+        return False
+
+    def __repr__(self):
+        pairs_str = ", ".join(f"({x!r}, {y})" for x, y in self)
+        return f"{type(self).__name__}([{pairs_str}])"
+
+    def index(self, key):
+        """Get the position of an entry or raise :exc:`ValueError`.
+
+        :param key: The key to be looked up.
+
+        .. versionchanged:: 0.5
+           This used to raise :exc:`IndexError`, which was inconsistent
+           with the list API.
+        """
+        if isinstance(key, str):
+            for idx, (item, _quality) in enumerate(self):
+                if self._value_matches(key, item):
+                    return idx
+            raise ValueError(key)
+        return list.index(self, key)
+
+    def find(self, key):
+        """Get the position of an entry or return -1.
+
+        :param key: The key to be looked up.
+        """
+        try:
+            return self.index(key)
+        except ValueError:
+            return -1
+
+    def values(self):
+        """Iterate over all values."""
+        for item in self:
+            yield item[0]
+
+    def to_header(self):
+        """Convert the header set into an HTTP header string."""
+        result = []
+        for value, quality in self:
+            if quality != 1:
+                value = f"{value};q={quality}"
+            result.append(value)
+        return ",".join(result)
+
+    def __str__(self):
+        return self.to_header()
+
+    def _best_single_match(self, match):
+        for client_item, quality in self:
+            if self._value_matches(match, client_item):
+                # self is sorted by specificity descending, we can exit
+                return client_item, quality
+        return None
+
+    def best_match(self, matches, default=None):
+        """Returns the best match from a list of possible matches based
+        on the specificity and quality of the client. If two items have the
+        same quality and specificity, the one is returned that comes first.
+
+        :param matches: a list of matches to check for
+        :param default: the value that is returned if none match
+        """
+        result = default
+        best_quality = -1
+        best_specificity = (-1,)
+        for server_item in matches:
+            match = self._best_single_match(server_item)
+            if not match:
+                continue
+            client_item, quality = match
+            specificity = self._specificity(client_item)
+            if quality <= 0 or quality < best_quality:
+                continue
+            # better quality or same quality but more specific => better match
+            if quality > best_quality or specificity > best_specificity:
+                result = server_item
+                best_quality = quality
+                best_specificity = specificity
+        return result
+
+    @property
+    def best(self):
+        """The best match as value."""
+        if self:
+            return self[0][0]
+
+
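+# Illustrative usage sketch (hypothetical header values, not a normative
+# reference): quality lookup and best_match resolution on an Accept list.
+#
+#     >>> a = Accept([("text/html", 1), ("application/json", 0.5)])
+#     >>> a.quality("application/json")
+#     0.5
+#     >>> a["missing/type"]
+#     0
+#     >>> a.best_match(["application/json", "text/plain"])
+#     'application/json'
+#     >>> a.to_header()
+#     'text/html,application/json;q=0.5'
+
+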
+_mime_split_re = re.compile(r"/|(?:\s*;\s*)")
+
+
+def _normalize_mime(value):
+    return _mime_split_re.split(value.lower())
+
+
+class MIMEAccept(Accept):
+    """Like :class:`Accept` but with special methods and behavior for
+    mimetypes.
+    """
+
+    def _specificity(self, value):
+        return tuple(x != "*" for x in _mime_split_re.split(value))
+
+    def _value_matches(self, value, item):
+        # item comes from the client, can't match if it's invalid.
+        if "/" not in item:
+            return False
+
+        # value comes from the application, tell the developer when it
+        # doesn't look valid.
+        if "/" not in value:
+            raise ValueError(f"invalid mimetype {value!r}")
+
+        # Split the match value into type, subtype, and a sorted list of parameters.
+        normalized_value = _normalize_mime(value)
+        value_type, value_subtype = normalized_value[:2]
+        value_params = sorted(normalized_value[2:])
+
+        # "*/*" is the only valid value that can start with "*".
+        if value_type == "*" and value_subtype != "*":
+            raise ValueError(f"invalid mimetype {value!r}")
+
+        # Split the accept item into type, subtype, and parameters.
+        normalized_item = _normalize_mime(item)
+        item_type, item_subtype = normalized_item[:2]
+        item_params = sorted(normalized_item[2:])
+
+        # "*/not-*" from the client is invalid, can't match.
+        if item_type == "*" and item_subtype != "*":
+            return False
+
+        return (
+            (item_type == "*" and item_subtype == "*")
+            or (value_type == "*" and value_subtype == "*")
+        ) or (
+            item_type == value_type
+            and (
+                item_subtype == "*"
+                or value_subtype == "*"
+                or (item_subtype == value_subtype and item_params == value_params)
+            )
+        )
+
+    @property
+    def accept_html(self):
+        """True if this object accepts HTML."""
+        return (
+            "text/html" in self or "application/xhtml+xml" in self or self.accept_xhtml
+        )
+
+    @property
+    def accept_xhtml(self):
+        """True if this object accepts XHTML."""
+        return "application/xhtml+xml" in self or "application/xml" in self
+
+    @property
+    def accept_json(self):
+        """True if this object accepts JSON."""
+        return "application/json" in self
+
+
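+# Illustrative usage sketch (hypothetical values): wildcard and exact
+# mimetype matching with MIMEAccept.
+#
+#     >>> ma = MIMEAccept([("text/html", 1), ("*/*", 0.1)])
+#     >>> "text/html" in ma
+#     True
+#     >>> ma.best_match(["image/png", "text/html"])
+#     'text/html'
+#     >>> ma.accept_html
+#     True
+
+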
+_locale_delim_re = re.compile(r"[_-]")
+
+
+def _normalize_lang(value):
+    """Process a language tag for matching."""
+    return _locale_delim_re.split(value.lower())
+
+
+class LanguageAccept(Accept):
+    """Like :class:`Accept` but with normalization for language tags."""
+
+    def _value_matches(self, value, item):
+        return item == "*" or _normalize_lang(value) == _normalize_lang(item)
+
+    def best_match(self, matches, default=None):
+        """Given a list of supported values, finds the best match from
+        the list of accepted values.
+
+        Language tags are normalized for the purpose of matching, but
+        are returned unchanged.
+
+        If no exact match is found, this will fall back to matching
+        the first subtag (primary language only), first with the
+        accepted values then with the match values. This partial matching
+        is not applied to any other language subtags.
+
+        The default is returned if no exact or fallback match is found.
+
+        :param matches: A list of supported languages to find a match.
+        :param default: The value that is returned if none match.
+        """
+        # Look for an exact match first. If a client accepts "en-US",
+        # "en-US" is a valid match at this point.
+        result = super().best_match(matches)
+
+        if result is not None:
+            return result
+
+        # Fall back to accepting primary tags. If a client accepts
+        # "en-US", "en" is a valid match at this point. Need to use
+        # re.split to account for 2 or 3 letter codes.
+        fallback = Accept(
+            [(_locale_delim_re.split(item[0], 1)[0], item[1]) for item in self]
+        )
+        result = fallback.best_match(matches)
+
+        if result is not None:
+            return result
+
+        # Fall back to matching primary tags. If the client accepts
+        # "en", "en-US" is a valid match at this point.
+        fallback_matches = [_locale_delim_re.split(item, 1)[0] for item in matches]
+        result = super().best_match(fallback_matches)
+
+        # Return a value from the original match list. Find the first
+        # original value that starts with the matched primary tag.
+        if result is not None:
+            return next(item for item in matches if item.startswith(result))
+
+        return default
+
+
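+# Illustrative usage sketch (hypothetical tags): exact and primary-tag
+# fallback matching with LanguageAccept.
+#
+#     >>> la = LanguageAccept([("en-US", 1)])
+#     >>> la.best_match(["en-US", "de"])
+#     'en-US'
+#     >>> la.best_match(["en"])  # accepted value reduced to primary tag
+#     'en'
+#     >>> LanguageAccept([("en", 1)]).best_match(["en-GB"])
+#     'en-GB'
+
+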
+class CharsetAccept(Accept):
+    """Like :class:`Accept` but with normalization for charsets."""
+
+    def _value_matches(self, value, item):
+        def _normalize(name):
+            try:
+                return codecs.lookup(name).name
+            except LookupError:
+                return name.lower()
+
+        return item == "*" or _normalize(value) == _normalize(item)
+
+
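+# Illustrative usage sketch (hypothetical values): charset names are
+# normalized through the codecs registry, so aliases match.
+#
+#     >>> ca = CharsetAccept([("utf-8", 1), ("iso-8859-1", 0.5)])
+#     >>> ca.quality("UTF8")  # alias of utf-8
+#     1
+#     >>> ca.best_match(["iso-8859-1", "utf-8"])
+#     'utf-8'
+
+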
+def cache_control_property(key, empty, type):
+    """Return a new property object for a cache header. Useful if you
+    want to add support for a cache extension in a subclass.
+
+    .. versionchanged:: 2.0
+        Renamed from ``cache_property``.
+    """
+    return property(
+        lambda x: x._get_cache_value(key, empty, type),
+        lambda x, v: x._set_cache_value(key, v, type),
+        lambda x: x._del_cache_value(key),
+        f"accessor for {key!r}",
+    )
+
+
+def cache_property(key, empty, type):
+    warnings.warn(
+        "'cache_property' is renamed to 'cache_control_property'. The"
+        " old name is deprecated and will be removed in Werkzeug 2.1.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return cache_control_property(key, empty, type)
+
+
+class _CacheControl(UpdateDictMixin, dict):
+    """Subclass of a dict that stores values for a Cache-Control header.  It
+    has accessors for all the cache-control directives specified in RFC 2616.
+    The class does not differentiate between request and response directives.
+
+    Because the cache-control directives in the HTTP header use dashes,
+    the Python descriptors use underscores instead.
+
+    To get a header of the :class:`CacheControl` object again you can convert
+    the object into a string or call the :meth:`to_header` method.  If you
+    plan to subclass it and add your own items, have a look at the source
+    code for that class.
+
+    .. versionchanged:: 0.4
+
+       Setting `no_cache` or `private` to boolean `True` will set the
+       implicit none-value, which is ``*``:
+
+       >>> cc = ResponseCacheControl()
+       >>> cc.no_cache = True
+       >>> cc
+       <ResponseCacheControl 'no-cache'>
+       >>> cc.no_cache
+       '*'
+       >>> cc.no_cache = None
+       >>> cc
+       <ResponseCacheControl ''>
+
+       In versions before 0.5 the behavior documented here affected the now
+       no longer existing `CacheControl` class.
+    """
+
+    no_cache = cache_control_property("no-cache", "*", None)
+    no_store = cache_control_property("no-store", None, bool)
+    max_age = cache_control_property("max-age", -1, int)
+    no_transform = cache_control_property("no-transform", None, None)
+
+    def __init__(self, values=(), on_update=None):
+        dict.__init__(self, values or ())
+        self.on_update = on_update
+        self.provided = values is not None
+
+    def _get_cache_value(self, key, empty, type):
+        """Used internally by the accessor properties."""
+        if type is bool:
+            return key in self
+        if key in self:
+            value = self[key]
+            if value is None:
+                return empty
+            elif type is not None:
+                try:
+                    value = type(value)
+                except ValueError:
+                    pass
+            return value
+        return None
+
+    def _set_cache_value(self, key, value, type):
+        """Used internally by the accessor properties."""
+        if type is bool:
+            if value:
+                self[key] = None
+            else:
+                self.pop(key, None)
+        else:
+            if value is None:
+                self.pop(key, None)
+            elif value is True:
+                self[key] = None
+            else:
+                self[key] = value
+
+    def _del_cache_value(self, key):
+        """Used internally by the accessor properties."""
+        if key in self:
+            del self[key]
+
+    def to_header(self):
+        """Convert the stored values into a cache control header."""
+        return http.dump_header(self)
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        kv_str = " ".join(f"{k}={v!r}" for k, v in sorted(self.items()))
+        return f"<{type(self).__name__} {kv_str}>"
+
+    cache_property = staticmethod(cache_control_property)
+
+
+class RequestCacheControl(ImmutableDictMixin, _CacheControl):
+    """A cache control for requests.  This is immutable and gives access
+    to all the request-relevant cache control headers.
+
+    To get a header of the :class:`RequestCacheControl` object again you can
+    convert the object into a string or call the :meth:`to_header` method.  If
+    you plan to subclass it and add your own items, have a look at the source
+    code for that class.
+
+    .. versionadded:: 0.5
+       In previous versions a `CacheControl` class existed that was used
+       both for request and response.
+    """
+
+    max_stale = cache_control_property("max-stale", "*", int)
+    min_fresh = cache_control_property("min-fresh", "*", int)
+    only_if_cached = cache_control_property("only-if-cached", None, bool)
+
+
+class ResponseCacheControl(_CacheControl):
+    """A cache control for responses.  Unlike :class:`RequestCacheControl`
+    this is mutable and gives access to response-relevant cache control
+    headers.
+
+    To get a header of the :class:`ResponseCacheControl` object again you can
+    convert the object into a string or call the :meth:`to_header` method.  If
+    you plan to subclass it and add your own items, have a look at the source
+    code for that class.
+
+    .. versionadded:: 0.5
+       In previous versions a `CacheControl` class existed that was used
+       both for request and response.
+    """
+
+    public = cache_control_property("public", None, bool)
+    private = cache_control_property("private", "*", None)
+    must_revalidate = cache_control_property("must-revalidate", None, bool)
+    proxy_revalidate = cache_control_property("proxy-revalidate", None, bool)
+    s_maxage = cache_control_property("s-maxage", None, None)
+    immutable = cache_control_property("immutable", None, bool)
+
+
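+# Illustrative usage sketch (hypothetical values): building a response
+# Cache-Control header, and extending the class with a hypothetical
+# directive via cache_control_property (header ordering follows dict
+# insertion order).
+#
+#     >>> cc = ResponseCacheControl()
+#     >>> cc.max_age = 3600
+#     >>> cc.public = True
+#     >>> cc.to_header()
+#     'max-age=3600, public'
+#
+#     class SWRCacheControl(ResponseCacheControl):
+#         stale_while_revalidate = cache_control_property(
+#             "stale-while-revalidate", None, int
+#         )
+
+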
+def csp_property(key):
+    """Return a new property object for a content security policy header.
+    Useful if you want to add support for a csp extension in a
+    subclass.
+    """
+    return property(
+        lambda x: x._get_value(key),
+        lambda x, v: x._set_value(key, v),
+        lambda x: x._del_value(key),
+        f"accessor for {key!r}",
+    )
+
+
+class ContentSecurityPolicy(UpdateDictMixin, dict):
+    """Subclass of a dict that stores values for a Content Security Policy
+    header. It has accessors for all the level 3 policies.
+
+    Because the csp directives in the HTTP header use dashes,
+    the Python descriptors use underscores instead.
+
+    To get a header of the :class:`ContentSecurityPolicy` object again
+    you can convert the object into a string or call the
+    :meth:`to_header` method.  If you plan to subclass it and add your
+    own items, have a look at the source code for that class.
+
+    .. versionadded:: 1.0.0
+       Support for Content Security Policy headers was added.
+
+    """
+
+    base_uri = csp_property("base-uri")
+    child_src = csp_property("child-src")
+    connect_src = csp_property("connect-src")
+    default_src = csp_property("default-src")
+    font_src = csp_property("font-src")
+    form_action = csp_property("form-action")
+    frame_ancestors = csp_property("frame-ancestors")
+    frame_src = csp_property("frame-src")
+    img_src = csp_property("img-src")
+    manifest_src = csp_property("manifest-src")
+    media_src = csp_property("media-src")
+    navigate_to = csp_property("navigate-to")
+    object_src = csp_property("object-src")
+    prefetch_src = csp_property("prefetch-src")
+    plugin_types = csp_property("plugin-types")
+    report_to = csp_property("report-to")
+    report_uri = csp_property("report-uri")
+    sandbox = csp_property("sandbox")
+    script_src = csp_property("script-src")
+    script_src_attr = csp_property("script-src-attr")
+    script_src_elem = csp_property("script-src-elem")
+    style_src = csp_property("style-src")
+    style_src_attr = csp_property("style-src-attr")
+    style_src_elem = csp_property("style-src-elem")
+    worker_src = csp_property("worker-src")
+
+    def __init__(self, values=(), on_update=None):
+        dict.__init__(self, values or ())
+        self.on_update = on_update
+        self.provided = values is not None
+
+    def _get_value(self, key):
+        """Used internally by the accessor properties."""
+        return self.get(key)
+
+    def _set_value(self, key, value):
+        """Used internally by the accessor properties."""
+        if value is None:
+            self.pop(key, None)
+        else:
+            self[key] = value
+
+    def _del_value(self, key):
+        """Used internally by the accessor properties."""
+        if key in self:
+            del self[key]
+
+    def to_header(self):
+        """Convert the stored values into a cache control header."""
+        return http.dump_csp_header(self)
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        kv_str = " ".join(f"{k}={v!r}" for k, v in sorted(self.items()))
+        return f"<{type(self).__name__} {kv_str}>"
+
+
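+# Illustrative usage sketch (hypothetical policy): directives set through
+# the descriptors are serialized by to_header.
+#
+#     >>> csp = ContentSecurityPolicy()
+#     >>> csp.default_src = "'self'"
+#     >>> csp.img_src = "*"
+#     >>> csp.to_header()
+#     "default-src 'self'; img-src *"
+
+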
+class CallbackDict(UpdateDictMixin, dict):
+    """A dict that calls a function passed every time something is changed.
+    The function is passed the dict instance.
+    """
+
+    def __init__(self, initial=None, on_update=None):
+        dict.__init__(self, initial or ())
+        self.on_update = on_update
+
+    def __repr__(self):
+        return f"<{type(self).__name__} {dict.__repr__(self)}>"
+
+
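+# Illustrative usage sketch (hypothetical callback): the on_update hook
+# fires after every mutation.
+#
+#     >>> def announce(d):
+#     ...     print("updated:", dict(d))
+#     >>> d = CallbackDict({"a": 1}, on_update=announce)
+#     >>> d["b"] = 2
+#     updated: {'a': 1, 'b': 2}
+
+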
+class HeaderSet(MutableSet):
+    """Similar to the :class:`ETags` class this implements a set-like structure.
+    Unlike :class:`ETags` this is case insensitive and used for vary, allow, and
+    content-language headers.
+
+    If not constructed using the :func:`parse_set_header` function the
+    instantiation works like this:
+
+    >>> hs = HeaderSet(['foo', 'bar', 'baz'])
+    >>> hs
+    HeaderSet(['foo', 'bar', 'baz'])
+    """
+
+    def __init__(self, headers=None, on_update=None):
+        self._headers = list(headers or ())
+        self._set = {x.lower() for x in self._headers}
+        self.on_update = on_update
+
+    def add(self, header):
+        """Add a new header to the set."""
+        self.update((header,))
+
+    def remove(self, header):
+        """Remove a header from the set.  This raises an :exc:`KeyError` if the
+        header is not in the set.
+
+        .. versionchanged:: 0.5
+            In older versions an :exc:`IndexError` was raised instead of a
+            :exc:`KeyError` if the object was missing.
+
+        :param header: the header to be removed.
+        """
+        key = header.lower()
+        if key not in self._set:
+            raise KeyError(header)
+        self._set.remove(key)
+        # Compare the stored (original-case) header against the lowered key.
+        for idx, item in enumerate(self._headers):
+            if item.lower() == key:
+                del self._headers[idx]
+                break
+        if self.on_update is not None:
+            self.on_update(self)
+
+    def update(self, iterable):
+        """Add all the headers from the iterable to the set.
+
+        :param iterable: the headers to add to the set.
+        """
+        inserted_any = False
+        for header in iterable:
+            key = header.lower()
+            if key not in self._set:
+                self._headers.append(header)
+                self._set.add(key)
+                inserted_any = True
+        if inserted_any and self.on_update is not None:
+            self.on_update(self)
+
+    def discard(self, header):
+        """Like :meth:`remove` but ignores errors.
+
+        :param header: the header to be discarded.
+        """
+        try:
+            self.remove(header)
+        except KeyError:
+            pass
+
+    def find(self, header):
+        """Return the index of the header in the set or return -1 if not found.
+
+        :param header: the header to be looked up.
+        """
+        header = header.lower()
+        for idx, item in enumerate(self._headers):
+            if item.lower() == header:
+                return idx
+        return -1
+
+    def index(self, header):
+        """Return the index of the header in the set or raise an
+        :exc:`IndexError`.
+
+        :param header: the header to be looked up.
+        """
+        rv = self.find(header)
+        if rv < 0:
+            raise IndexError(header)
+        return rv
+
+    def clear(self):
+        """Clear the set."""
+        self._set.clear()
+        del self._headers[:]
+        if self.on_update is not None:
+            self.on_update(self)
+
+    def as_set(self, preserve_casing=False):
+        """Return the set as real python set type.  When calling this, all
+        the items are converted to lowercase and the ordering is lost.
+
+        :param preserve_casing: if set to `True` the items in the set returned
+                                will have the original case like in the
+                                :class:`HeaderSet`, otherwise they will
+                                be lowercase.
+        """
+        if preserve_casing:
+            return set(self._headers)
+        return set(self._set)
+
+    def to_header(self):
+        """Convert the header set into an HTTP header string."""
+        return ", ".join(map(http.quote_header_value, self._headers))
+
+    def __getitem__(self, idx):
+        return self._headers[idx]
+
+    def __delitem__(self, idx):
+        rv = self._headers.pop(idx)
+        self._set.remove(rv.lower())
+        if self.on_update is not None:
+            self.on_update(self)
+
+    def __setitem__(self, idx, value):
+        old = self._headers[idx]
+        self._set.remove(old.lower())
+        self._headers[idx] = value
+        self._set.add(value.lower())
+        if self.on_update is not None:
+            self.on_update(self)
+
+    def __contains__(self, header):
+        return header.lower() in self._set
+
+    def __len__(self):
+        return len(self._set)
+
+    def __iter__(self):
+        return iter(self._headers)
+
+    def __bool__(self):
+        return bool(self._set)
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        return f"{type(self).__name__}({self._headers!r})"
+
+
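+# Illustrative usage sketch (hypothetical headers): membership is
+# case-insensitive while the original casing is kept for serialization.
+#
+#     >>> vary = HeaderSet(["Accept", "Cookie"])
+#     >>> "accept" in vary
+#     True
+#     >>> vary.add("ACCEPT")  # duplicate, ignored
+#     >>> vary.to_header()
+#     'Accept, Cookie'
+
+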
+class ETags(Collection):
+    """A set that can be used to check if one etag is present in a collection
+    of etags.
+    """
+
+    def __init__(self, strong_etags=None, weak_etags=None, star_tag=False):
+        if not star_tag and strong_etags:
+            self._strong = frozenset(strong_etags)
+        else:
+            self._strong = frozenset()
+
+        self._weak = frozenset(weak_etags or ())
+        self.star_tag = star_tag
+
+    def as_set(self, include_weak=False):
+        """Convert the `ETags` object into a python set.  Per default all the
+        weak etags are not part of this set."""
+        rv = set(self._strong)
+        if include_weak:
+            rv.update(self._weak)
+        return rv
+
+    def is_weak(self, etag):
+        """Check if an etag is weak."""
+        return etag in self._weak
+
+    def is_strong(self, etag):
+        """Check if an etag is strong."""
+        return etag in self._strong
+
+    def contains_weak(self, etag):
+        """Check if an etag is part of the set including weak and strong tags."""
+        return self.is_weak(etag) or self.contains(etag)
+
+    def contains(self, etag):
+        """Check if an etag is part of the set ignoring weak tags.
+        It is also possible to use the ``in`` operator.
+        """
+        if self.star_tag:
+            return True
+        return self.is_strong(etag)
+
+    def contains_raw(self, etag):
+        """When passed a quoted tag it will check if this tag is part of the
+        set.  If the tag is weak it is checked against weak and strong tags,
+        otherwise strong only."""
+        etag, weak = http.unquote_etag(etag)
+        if weak:
+            return self.contains_weak(etag)
+        return self.contains(etag)
+
+    def to_header(self):
+        """Convert the etags set into a HTTP header string."""
+        if self.star_tag:
+            return "*"
+        return ", ".join(
+            [f'"{x}"' for x in self._strong] + [f'W/"{x}"' for x in self._weak]
+        )
+
+    def __call__(self, etag=None, data=None, include_weak=False):
+        if [etag, data].count(None) != 1:
+            raise TypeError("exactly one of etag or data is required")
+        if etag is None:
+            etag = http.generate_etag(data)
+        if include_weak:
+            if etag in self._weak:
+                return True
+        return etag in self._strong
+
+    def __bool__(self):
+        return bool(self.star_tag or self._strong or self._weak)
+
+    def __str__(self):
+        return self.to_header()
+
+    def __len__(self):
+        return len(self._strong)
+
+    def __iter__(self):
+        return iter(self._strong)
+
+    def __contains__(self, etag):
+        return self.contains(etag)
+
+    def __repr__(self):
+        return f"<{type(self).__name__} {str(self)!r}>"
+
+
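+# Illustrative usage sketch (hypothetical tags): strong and weak etag
+# containment checks.
+#
+#     >>> etags = ETags(["abc"], weak_etags=["xyz"])
+#     >>> "abc" in etags
+#     True
+#     >>> etags.contains_raw('W/"xyz"')
+#     True
+#     >>> etags.to_header()
+#     '"abc", W/"xyz"'
+
+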
+class IfRange:
+    """Very simple object that represents the `If-Range` header in parsed
+    form.  It will either have neither a etag or date or one of either but
+    never both.
+
+    .. versionadded:: 0.7
+    """
+
+    def __init__(self, etag=None, date=None):
+        #: The etag parsed and unquoted.  Ranges always operate on strong
+        #: etags so the weakness information is not necessary.
+        self.etag = etag
+        #: The date in parsed format or `None`.
+        self.date = date
+
+    def to_header(self):
+        """Converts the object back into an HTTP header."""
+        if self.date is not None:
+            return http.http_date(self.date)
+        if self.etag is not None:
+            return http.quote_etag(self.etag)
+        return ""
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        return f"<{type(self).__name__} {str(self)!r}>"
+
+
+class Range:
+    """Represents a ``Range`` header. All methods only support only
+    bytes as the unit. Stores a list of ranges if given, but the methods
+    only work if only one range is provided.
+
+    :raise ValueError: If the ranges provided are invalid.
+
+    .. versionchanged:: 0.15
+        The ranges passed in are validated.
+
+    .. versionadded:: 0.7
+    """
+
+    def __init__(self, units, ranges):
+        #: The units of this range.  Usually "bytes".
+        self.units = units
+        #: A list of ``(begin, end)`` tuples for the range header provided.
+        #: The ranges are non-inclusive.
+        self.ranges = ranges
+
+        for start, end in ranges:
+            if start is None or (end is not None and (start < 0 or start >= end)):
+                raise ValueError(f"{(start, end)} is not a valid range.")
+
+    def range_for_length(self, length):
+        """If the range is for bytes, the length is not None and there is
+        exactly one range and it is satisfiable it returns a ``(start, stop)``
+        tuple, otherwise `None`.
+        """
+        if self.units != "bytes" or length is None or len(self.ranges) != 1:
+            return None
+        start, end = self.ranges[0]
+        if end is None:
+            end = length
+            if start < 0:
+                start += length
+        if http.is_byte_range_valid(start, end, length):
+            return start, min(end, length)
+        return None
+
+    def make_content_range(self, length):
+        """Creates a :class:`~werkzeug.datastructures.ContentRange` object
+        from the current range and given content length.
+        """
+        rng = self.range_for_length(length)
+        if rng is not None:
+            return ContentRange(self.units, rng[0], rng[1], length)
+        return None
+
+    def to_header(self):
+        """Converts the object back into an HTTP header."""
+        ranges = []
+        for begin, end in self.ranges:
+            if end is None:
+                ranges.append(f"{begin}-" if begin >= 0 else str(begin))
+            else:
+                ranges.append(f"{begin}-{end - 1}")
+        return f"{self.units}={','.join(ranges)}"
+
+    def to_content_range_header(self, length):
+        """Converts the object into `Content-Range` HTTP header,
+        based on given length
+        """
+        range = self.range_for_length(length)
+        if range is not None:
+            return f"{self.units} {range[0]}-{range[1] - 1}/{length}"
+        return None
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        return f"<{type(self).__name__} {str(self)!r}>"
+
+
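+# Illustrative usage sketch (hypothetical range): ranges are stored
+# non-inclusive, while the serialized header uses inclusive end points.
+#
+#     >>> r = Range("bytes", [(0, 500)])
+#     >>> r.to_header()
+#     'bytes=0-499'
+#     >>> r.range_for_length(1000)
+#     (0, 500)
+#     >>> r.to_content_range_header(1000)
+#     'bytes 0-499/1000'
+
+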
+def _callback_property(name):
+    def fget(self):
+        return getattr(self, name)
+
+    def fset(self, value):
+        setattr(self, name, value)
+        if self.on_update is not None:
+            self.on_update(self)
+
+    return property(fget, fset)
+
+
+class ContentRange:
+    """Represents the content range header.
+
+    .. versionadded:: 0.7
+    """
+
+    def __init__(self, units, start, stop, length=None, on_update=None):
+        assert http.is_byte_range_valid(start, stop, length), "Bad range provided"
+        self.on_update = on_update
+        self.set(start, stop, length, units)
+
+    #: The units to use, usually "bytes".
+    units = _callback_property("_units")
+    #: The start point of the range or `None`.
+    start = _callback_property("_start")
+    #: The stop point of the range (non-inclusive) or `None`.  Can only be
+    #: `None` if also start is `None`.
+    stop = _callback_property("_stop")
+    #: The length of the range or `None`.
+    length = _callback_property("_length")
+
+    def set(self, start, stop, length=None, units="bytes"):
+        """Simple method to update the ranges."""
+        assert http.is_byte_range_valid(start, stop, length), "Bad range provided"
+        self._units = units
+        self._start = start
+        self._stop = stop
+        self._length = length
+        if self.on_update is not None:
+            self.on_update(self)
+
+    def unset(self):
+        """Sets the units to `None` which indicates that the header should
+        no longer be used.
+        """
+        self.set(None, None, units=None)
+
+    def to_header(self):
+        if self.units is None:
+            return ""
+        if self.length is None:
+            length = "*"
+        else:
+            length = self.length
+        if self.start is None:
+            return f"{self.units} */{length}"
+        return f"{self.units} {self.start}-{self.stop - 1}/{length}"
+
+    def __bool__(self):
+        return self.units is not None
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        return f"<{type(self).__name__} {str(self)!r}>"
+
+
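+# Illustrative usage sketch (hypothetical values): a ContentRange can be
+# serialized and later unset to drop the header.
+#
+#     >>> cr = ContentRange("bytes", 0, 500, length=1000)
+#     >>> cr.to_header()
+#     'bytes 0-499/1000'
+#     >>> cr.unset()
+#     >>> bool(cr)
+#     False
+
+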
+class Authorization(ImmutableDictMixin, dict):
+    """Represents an ``Authorization`` header sent by the client.
+
+    This is returned by
+    :func:`~werkzeug.http.parse_authorization_header`. It can be useful
+    to create the object manually to pass to the test
+    :class:`~werkzeug.test.Client`.
+
+    .. versionchanged:: 0.5
+        This object became immutable.
+    """
+
+    def __init__(self, auth_type, data=None):
+        dict.__init__(self, data or {})
+        self.type = auth_type
+
+    @property
+    def username(self):
+        """The username transmitted.  This is set for both basic and digest
+        auth all the time.
+        """
+        return self.get("username")
+
+    @property
+    def password(self):
+        """When the authentication type is basic this is the password
+        transmitted by the client, else `None`.
+        """
+        return self.get("password")
+
+    @property
+    def realm(self):
+        """This is the server realm sent back for HTTP digest auth."""
+        return self.get("realm")
+
+    @property
+    def nonce(self):
+        """The nonce the server sent for digest auth, sent back by the client.
+        A nonce should be unique for every 401 response for HTTP digest auth.
+        """
+        return self.get("nonce")
+
+    @property
+    def uri(self):
+        """The URI from Request-URI of the Request-Line; duplicated because
+        proxies are allowed to change the Request-Line in transit.  HTTP
+        digest auth only.
+        """
+        return self.get("uri")
+
+    @property
+    def nc(self):
+        """The nonce count value transmitted by clients if a qop-header is
+        also transmitted.  HTTP digest auth only.
+        """
+        return self.get("nc")
+
+    @property
+    def cnonce(self):
+        """If the server sent a qop-header in the ``WWW-Authenticate``
+        header, the client has to provide this value for HTTP digest auth.
+        See the RFC for more details.
+        """
+        return self.get("cnonce")
+
+    @property
+    def response(self):
+        """A string of 32 hex digits computed as defined in RFC 2617, which
+        proves that the user knows a password.  Digest auth only.
+        """
+        return self.get("response")
+
+    @property
+    def opaque(self):
+        """The opaque header from the server returned unchanged by the client.
+        It is recommended that this string be base64 or hexadecimal data.
+        Digest auth only.
+        """
+        return self.get("opaque")
+
+    @property
+    def qop(self):
+        """Indicates what "quality of protection" the client has applied to
+        the message for HTTP digest auth. Note that this is a single token,
+        not a quoted list of alternatives as in WWW-Authenticate.
+        """
+        return self.get("qop")
+
+    def to_header(self):
+        """Convert to a string value for an ``Authorization`` header.
+
+        .. versionadded:: 2.0
+            Added to support passing authorization to the test client.
+        """
+        if self.type == "basic":
+            value = base64.b64encode(
+                f"{self.username}:{self.password}".encode("utf8")
+            ).decode("utf8")
+            return f"Basic {value}"
+
+        if self.type == "digest":
+            return f"Digest {http.dump_header(self)}"
+
+        raise ValueError(f"Unsupported type {self.type!r}.")
+
+
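+# Illustrative usage sketch (hypothetical credentials): building a basic
+# Authorization header, e.g. for the test client.
+#
+#     >>> auth = Authorization("basic", {"username": "user", "password": "pass"})
+#     >>> auth.username
+#     'user'
+#     >>> auth.to_header()
+#     'Basic dXNlcjpwYXNz'
+
+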
+def auth_property(name, doc=None):
+    """A static helper function for Authentication subclasses to add
+    extra authentication system properties onto a class::
+
+        class FooAuthenticate(WWWAuthenticate):
+            special_realm = auth_property('special_realm')
+
+    For more information, have a look at the source code to see how the
+    regular properties (:attr:`realm` etc.) are implemented.
+    """
+
+    def _set_value(self, value):
+        if value is None:
+            self.pop(name, None)
+        else:
+            self[name] = str(value)
+
+    return property(lambda x: x.get(name), _set_value, doc=doc)
+
+
+def _set_property(name, doc=None):
+    def fget(self):
+        def on_update(header_set):
+            if not header_set and name in self:
+                del self[name]
+            elif header_set:
+                self[name] = header_set.to_header()
+
+        return http.parse_set_header(self.get(name), on_update)
+
+    return property(fget, doc=doc)
+
+
+class WWWAuthenticate(UpdateDictMixin, dict):
+    """Provides simple access to `WWW-Authenticate` headers."""
+
+    #: list of keys that require quoting in the generated header
+    _require_quoting = frozenset(["domain", "nonce", "opaque", "realm", "qop"])
+
+    def __init__(self, auth_type=None, values=None, on_update=None):
+        dict.__init__(self, values or ())
+        if auth_type:
+            self["__auth_type__"] = auth_type
+        self.on_update = on_update
+
+    def set_basic(self, realm="authentication required"):
+        """Clear the auth info and enable basic auth."""
+        dict.clear(self)
+        dict.update(self, {"__auth_type__": "basic", "realm": realm})
+        if self.on_update:
+            self.on_update(self)
+
+    def set_digest(
+        self, realm, nonce, qop=("auth",), opaque=None, algorithm=None, stale=False
+    ):
+        """Clear the auth info and enable digest auth."""
+        d = {
+            "__auth_type__": "digest",
+            "realm": realm,
+            "nonce": nonce,
+            "qop": http.dump_header(qop),
+        }
+        if stale:
+            d["stale"] = "TRUE"
+        if opaque is not None:
+            d["opaque"] = opaque
+        if algorithm is not None:
+            d["algorithm"] = algorithm
+        dict.clear(self)
+        dict.update(self, d)
+        if self.on_update:
+            self.on_update(self)
+
+    def to_header(self):
+        """Convert the stored values into a WWW-Authenticate header."""
+        d = dict(self)
+        auth_type = d.pop("__auth_type__", None) or "basic"
+        kv_items = (
+            (k, http.quote_header_value(v, allow_token=k not in self._require_quoting))
+            for k, v in d.items()
+        )
+        kv_string = ", ".join([f"{k}={v}" for k, v in kv_items])
+        return f"{auth_type.title()} {kv_string}"
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        return f"<{type(self).__name__} {self.to_header()!r}>"
+
+    type = auth_property(
+        "__auth_type__",
+        doc="""The type of the auth mechanism. HTTP currently specifies
+        ``Basic`` and ``Digest``.""",
+    )
+    realm = auth_property(
+        "realm",
+        doc="""A string to be displayed to users so they know which
+        username and password to use. This string should contain at
+        least the name of the host performing the authentication and
+        might additionally indicate the collection of users who might
+        have access.""",
+    )
+    domain = _set_property(
+        "domain",
+        doc="""A list of URIs that define the protection space. If a URI
+        is an absolute path, it is relative to the canonical root URL of
+        the server being accessed.""",
+    )
+    nonce = auth_property(
+        "nonce",
+        doc="""
+        A server-specified data string which should be uniquely generated
+        each time a 401 response is made. It is recommended that this
+        string be base64 or hexadecimal data.""",
+    )
+    opaque = auth_property(
+        "opaque",
+        doc="""A string of data, specified by the server, which should
+        be returned by the client unchanged in the Authorization header
+        of subsequent requests with URIs in the same protection space.
+        It is recommended that this string be base64 or hexadecimal
+        data.""",
+    )
+    algorithm = auth_property(
+        "algorithm",
+        doc="""A string indicating a pair of algorithms used to produce
+        the digest and a checksum. If this is not present it is assumed
+        to be "MD5". If the algorithm is not understood, the challenge
+        should be ignored (and a different one used, if there is more
+        than one).""",
+    )
+    qop = _set_property(
+        "qop",
+        doc="""A set of quality-of-privacy directives such as auth and
+        auth-int.""",
+    )
+
+    @property
+    def stale(self):
+        """A flag, indicating that the previous request from the client
+        was rejected because the nonce value was stale.
+        """
+        val = self.get("stale")
+        if val is not None:
+            return val.lower() == "true"
+
+    @stale.setter
+    def stale(self, value):
+        if value is None:
+            self.pop("stale", None)
+        else:
+            self["stale"] = "TRUE" if value else "FALSE"
+
+    auth_property = staticmethod(auth_property)
+
+
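+# Illustrative usage sketch (hypothetical realm): realm is one of the keys
+# that is always quoted in the generated header.
+#
+#     >>> wa = WWWAuthenticate("basic")
+#     >>> wa.realm = "login required"
+#     >>> wa.to_header()
+#     'Basic realm="login required"'
+
+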
+class FileStorage:
+    """The :class:`FileStorage` class is a thin wrapper over incoming files.
+    It is used by the request object to represent uploaded files.  All the
+    attributes of the wrapper stream are proxied by the file storage so
+    it's possible to do ``storage.read()`` instead of the long form
+    ``storage.stream.read()``.
+    """
+
+    def __init__(
+        self,
+        stream=None,
+        filename=None,
+        name=None,
+        content_type=None,
+        content_length=None,
+        headers=None,
+    ):
+        self.name = name
+        self.stream = stream or BytesIO()
+
+        # If no filename is provided we can attempt to get the filename
+        # from the stream object passed.  Here we have to be careful to
+        # skip things like <fdopen>, <stderr> etc.  Python marks these
+        # special filenames with angle brackets.
+        if filename is None:
+            filename = getattr(stream, "name", None)
+            s = _make_encode_wrapper(filename)
+            if filename and filename[0] == s("<") and filename[-1] == s(">"):
+                filename = None
+
+            # Make sure the filename is not bytes. This might happen if
+            # the file was opened from the bytes API.
+            if isinstance(filename, bytes):
+                filename = filename.decode(get_filesystem_encoding(), "replace")
+
+        self.filename = filename
+        if headers is None:
+            headers = Headers()
+        self.headers = headers
+        if content_type is not None:
+            headers["Content-Type"] = content_type
+        if content_length is not None:
+            headers["Content-Length"] = str(content_length)
+
+    def _parse_content_type(self):
+        if not hasattr(self, "_parsed_content_type"):
+            self._parsed_content_type = http.parse_options_header(self.content_type)
+
+    @property
+    def content_type(self):
+        """The content-type sent in the header.  Usually not available"""
+        return self.headers.get("content-type")
+
+    @property
+    def content_length(self):
+        """The content-length sent in the header.  Usually not available"""
+        return int(self.headers.get("content-length") or 0)
+
+    @property
+    def mimetype(self):
+        """Like :attr:`content_type`, but without parameters (eg, without
+        charset, type etc.) and always lowercase.  For example if the content
+        type is ``text/HTML; charset=utf-8`` the mimetype would be
+        ``'text/html'``.
+
+        .. versionadded:: 0.7
+        """
+        self._parse_content_type()
+        return self._parsed_content_type[0].lower()
+
+    @property
+    def mimetype_params(self):
+        """The mimetype parameters as dict.  For example if the content
+        type is ``text/html; charset=utf-8`` the params would be
+        ``{'charset': 'utf-8'}``.
+
+        .. versionadded:: 0.7
+        """
+        self._parse_content_type()
+        return self._parsed_content_type[1]
+
+    def save(self, dst, buffer_size=16384):
+        """Save the file to a destination path or file object.  If the
+        destination is a file object you have to close it yourself after the
+        call.  The buffer size is the number of bytes held in memory during
+        the copy process.  It defaults to 16KB.
+
+        For secure file saving also have a look at :func:`secure_filename`.
+
+        :param dst: a filename, :class:`os.PathLike`, or open file
+            object to write to.
+        :param buffer_size: Passed as the ``length`` parameter of
+            :func:`shutil.copyfileobj`.
+
+        .. versionchanged:: 1.0
+            Supports :mod:`pathlib`.
+        """
+        from shutil import copyfileobj
+
+        close_dst = False
+
+        if hasattr(dst, "__fspath__"):
+            dst = fspath(dst)
+
+        if isinstance(dst, str):
+            dst = open(dst, "wb")
+            close_dst = True
+
+        try:
+            copyfileobj(self.stream, dst, buffer_size)
+        finally:
+            if close_dst:
+                dst.close()
+
+    def close(self):
+        """Close the underlying file if possible."""
+        try:
+            self.stream.close()
+        except Exception:
+            pass
+
+    def __bool__(self):
+        return bool(self.filename)
+
+    def __getattr__(self, name):
+        try:
+            return getattr(self.stream, name)
+        except AttributeError:
+            # SpooledTemporaryFile doesn't implement IOBase, get the
+            # attribute from its backing file instead.
+            # https://github.com/python/cpython/pull/3249
+            if hasattr(self.stream, "_file"):
+                return getattr(self.stream._file, name)
+            raise
+
+    def __iter__(self):
+        return iter(self.stream)
+
+    def __repr__(self):
+        return f"<{type(self).__name__}: {self.filename!r} ({self.content_type!r})>"
+
+
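+# Illustrative usage sketch (hypothetical upload): stream attributes are
+# proxied, and the content type is parsed lazily.
+#
+#     >>> from io import BytesIO
+#     >>> fs = FileStorage(BytesIO(b"hello"), filename="hello.txt",
+#     ...                  content_type="text/plain; charset=utf-8")
+#     >>> fs.mimetype
+#     'text/plain'
+#     >>> fs.mimetype_params
+#     {'charset': 'utf-8'}
+#     >>> fs.read()
+#     b'hello'
+
+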
+# circular dependencies
+from . import http
diff --git a/venv/lib/python3.7/site-packages/werkzeug/datastructures.pyi b/venv/lib/python3.7/site-packages/werkzeug/datastructures.pyi
new file mode 100644
index 00000000..b61540ec
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/datastructures.pyi
@@ -0,0 +1,906 @@
+from datetime import datetime
+from os import PathLike
+from typing import Any
+from typing import BinaryIO
+from typing import Callable
+from typing import Collection
+from typing import Dict
+from typing import FrozenSet
+from typing import Generic
+from typing import Hashable
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TypeVar
+from typing import Union
+from _typeshed.wsgi import WSGIEnvironment
+
+from typing_extensions import Literal
+
+K = TypeVar("K")
+V = TypeVar("V")
+T = TypeVar("T")
+
+def is_immutable(self: object) -> NoReturn: ...
+def iter_multi_items(
+    mapping: Union[Mapping[K, Union[V, Iterable[V]]], Iterable[Tuple[K, V]]]
+) -> Iterator[Tuple[K, V]]: ...
+
+class ImmutableListMixin(List[V]):
+    _hash_cache: Optional[int]
+    def __hash__(self) -> int: ...  # type: ignore
+    def __delitem__(self, key: Union[int, slice]) -> NoReturn: ...
+    def __iadd__(self, other: Any) -> NoReturn: ...  # type: ignore
+    def __imul__(self, other: int) -> NoReturn: ...
+    def __setitem__(  # type: ignore
+        self, key: Union[int, slice], value: V
+    ) -> NoReturn: ...
+    def append(self, value: V) -> NoReturn: ...
+    def remove(self, value: V) -> NoReturn: ...
+    def extend(self, values: Iterable[V]) -> NoReturn: ...
+    def insert(self, pos: int, value: V) -> NoReturn: ...
+    def pop(self, index: int = -1) -> NoReturn: ...
+    def reverse(self) -> NoReturn: ...
+    def sort(
+        self, key: Optional[Callable[[V], Any]] = None, reverse: bool = False
+    ) -> NoReturn: ...
+
+class ImmutableList(ImmutableListMixin[V]): ...
+
+class ImmutableDictMixin(Dict[K, V]):
+    _hash_cache: Optional[int]
+    @classmethod
+    def fromkeys(  # type: ignore
+        cls, keys: Iterable[K], value: Optional[V] = None
+    ) -> ImmutableDictMixin[K, V]: ...
+    def _iter_hashitems(self) -> Iterable[Hashable]: ...
+    def __hash__(self) -> int: ...  # type: ignore
+    def setdefault(self, key: K, default: Optional[V] = None) -> NoReturn: ...
+    def update(self, *args: Any, **kwargs: V) -> NoReturn: ...
+    def pop(self, key: K, default: Optional[V] = None) -> NoReturn: ...  # type: ignore
+    def popitem(self) -> NoReturn: ...
+    def __setitem__(self, key: K, value: V) -> NoReturn: ...
+    def __delitem__(self, key: K) -> NoReturn: ...
+    def clear(self) -> NoReturn: ...
+
+class ImmutableMultiDictMixin(ImmutableDictMixin[K, V]):
+    def _iter_hashitems(self) -> Iterable[Hashable]: ...
+    def add(self, key: K, value: V) -> NoReturn: ...
+    def popitemlist(self) -> NoReturn: ...
+    def poplist(self, key: K) -> NoReturn: ...
+    def setlist(self, key: K, new_list: Iterable[V]) -> NoReturn: ...
+    def setlistdefault(
+        self, key: K, default_list: Optional[Iterable[V]] = None
+    ) -> NoReturn: ...
+
+def _calls_update(name: str) -> Callable[[UpdateDictMixin[K, V]], Any]: ...
+
+class UpdateDictMixin(Dict[K, V]):
+    on_update: Optional[Callable[[UpdateDictMixin[K, V]], None]]
+    def setdefault(self, key: K, default: Optional[V] = None) -> V: ...
+    @overload
+    def pop(self, key: K) -> V: ...
+    @overload
+    def pop(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: ...
+    def __setitem__(self, key: K, value: V) -> None: ...
+    def __delitem__(self, key: K) -> None: ...
+    def clear(self) -> None: ...
+    def popitem(self) -> Tuple[K, V]: ...
+    def update(
+        self, *args: Union[Mapping[K, V], Iterable[Tuple[K, V]]], **kwargs: V
+    ) -> None: ...
+
+class TypeConversionDict(Dict[K, V]):
+    @overload  # type: ignore
+    def get(self, key: K) -> Optional[V]: ...
+    @overload
+    def get(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: ...
+    @overload
+    def get(
+        self, key: K, default: Optional[T] = None, type: Callable[[V], T] = ...
+    ) -> Optional[T]: ...
+
+class ImmutableTypeConversionDict(ImmutableDictMixin[K, V], TypeConversionDict[K, V]):
+    def copy(self) -> TypeConversionDict[K, V]: ...
+    def __copy__(self) -> ImmutableTypeConversionDict: ...
+
+class MultiDict(TypeConversionDict[K, V]):
+    def __init__(
+        self,
+        mapping: Optional[
+            Union[Mapping[K, Union[V, Iterable[V]]], Iterable[Tuple[K, V]]]
+        ] = None,
+    ) -> None: ...
+    def __getitem__(self, item: K) -> V: ...
+    def __setitem__(self, key: K, value: V) -> None: ...
+    def add(self, key: K, value: V) -> None: ...
+    @overload
+    def getlist(self, key: K) -> List[V]: ...
+    @overload
+    def getlist(self, key: K, type: Callable[[V], T] = ...) -> List[T]: ...
+    def setlist(self, key: K, new_list: Iterable[V]) -> None: ...
+    def setdefault(self, key: K, default: Optional[V] = None) -> V: ...
+    def setlistdefault(
+        self, key: K, default_list: Optional[Iterable[V]] = None
+    ) -> List[V]: ...
+    def items(self, multi: bool = False) -> Iterator[Tuple[K, V]]: ...  # type: ignore
+    def lists(self) -> Iterator[Tuple[K, List[V]]]: ...
+    def values(self) -> Iterator[V]: ...  # type: ignore
+    def listvalues(self) -> Iterator[List[V]]: ...
+    def copy(self) -> MultiDict[K, V]: ...
+    def deepcopy(self, memo: Any = None) -> MultiDict[K, V]: ...
+    @overload
+    def to_dict(self) -> Dict[K, V]: ...
+    @overload
+    def to_dict(self, flat: Literal[False]) -> Dict[K, List[V]]: ...
+    def update(  # type: ignore
+        self, mapping: Union[Mapping[K, V], Iterable[Tuple[K, V]]]
+    ) -> None: ...
+    @overload
+    def pop(self, key: K) -> V: ...
+    @overload
+    def pop(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: ...
+    def popitem(self) -> Tuple[K, V]: ...
+    def poplist(self, key: K) -> List[V]: ...
+    def popitemlist(self) -> Tuple[K, List[V]]: ...
+    def __copy__(self) -> MultiDict[K, V]: ...
+    def __deepcopy__(self, memo: Any) -> MultiDict[K, V]: ...
+
+class _omd_bucket(Generic[K, V]):
+    prev: Optional[_omd_bucket]
+    next: Optional[_omd_bucket]
+    key: K
+    value: V
+    def __init__(self, omd: OrderedMultiDict, key: K, value: V) -> None: ...
+    def unlink(self, omd: OrderedMultiDict) -> None: ...
+
+class OrderedMultiDict(MultiDict[K, V]):
+    _first_bucket: Optional[_omd_bucket]
+    _last_bucket: Optional[_omd_bucket]
+    def __init__(self, mapping: Optional[Mapping[K, V]] = None) -> None: ...
+    def __eq__(self, other: object) -> bool: ...
+    def __getitem__(self, key: K) -> V: ...
+    def __setitem__(self, key: K, value: V) -> None: ...
+    def __delitem__(self, key: K) -> None: ...
+    def keys(self) -> Iterator[K]: ...  # type: ignore
+    def __iter__(self) -> Iterator[K]: ...
+    def values(self) -> Iterator[V]: ...  # type: ignore
+    def items(self, multi: bool = False) -> Iterator[Tuple[K, V]]: ...  # type: ignore
+    def lists(self) -> Iterator[Tuple[K, List[V]]]: ...
+    def listvalues(self) -> Iterator[List[V]]: ...
+    def add(self, key: K, value: V) -> None: ...
+    @overload
+    def getlist(self, key: K) -> List[V]: ...
+    @overload
+    def getlist(self, key: K, type: Callable[[V], T] = ...) -> List[T]: ...
+    def setlist(self, key: K, new_list: Iterable[V]) -> None: ...
+    def setlistdefault(
+        self, key: K, default_list: Optional[Iterable[V]] = None
+    ) -> List[V]: ...
+    def update(  # type: ignore
+        self, mapping: Union[Mapping[K, V], Iterable[Tuple[K, V]]]
+    ) -> None: ...
+    def poplist(self, key: K) -> List[V]: ...
+    @overload
+    def pop(self, key: K) -> V: ...
+    @overload
+    def pop(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: ...
+    def popitem(self) -> Tuple[K, V]: ...
+    def popitemlist(self) -> Tuple[K, List[V]]: ...
+
+def _options_header_vkw(
+    value: str, kw: Mapping[str, Optional[Union[str, int]]]
+) -> str: ...
+def _unicodify_header_value(value: Union[str, int]) -> str: ...
+
+HV = Union[str, int]
+
+class Headers(Dict[str, str]):
+    _list: List[Tuple[str, str]]
+    def __init__(
+        self,
+        defaults: Optional[
+            Union[Mapping[str, Union[HV, Iterable[HV]]], Iterable[Tuple[str, HV]]]
+        ] = None,
+    ) -> None: ...
+    @overload
+    def __getitem__(self, key: str) -> str: ...
+    @overload
+    def __getitem__(self, key: int) -> Tuple[str, str]: ...
+    @overload
+    def __getitem__(self, key: slice) -> Headers: ...
+    @overload
+    def __getitem__(self, key: str, _get_mode: Literal[True] = ...) -> str: ...
+    def __eq__(self, other: object) -> bool: ...
+    @overload  # type: ignore
+    def get(self, key: str, default: str) -> str: ...
+    @overload
+    def get(self, key: str, default: Optional[str] = None) -> Optional[str]: ...
+    @overload
+    def get(
+        self, key: str, default: Optional[T] = None, type: Callable[[str], T] = ...
+    ) -> Optional[T]: ...
+    @overload
+    def getlist(self, key: str) -> List[str]: ...
+    @overload
+    def getlist(self, key: str, type: Callable[[str], T]) -> List[T]: ...
+    def get_all(self, name: str) -> List[str]: ...
+    def items(  # type: ignore
+        self, lower: bool = False
+    ) -> Iterator[Tuple[str, str]]: ...
+    def keys(self, lower: bool = False) -> Iterator[str]: ...  # type: ignore
+    def values(self) -> Iterator[str]: ...  # type: ignore
+    def extend(
+        self,
+        *args: Union[Mapping[str, Union[HV, Iterable[HV]]], Iterable[Tuple[str, HV]]],
+        **kwargs: Union[HV, Iterable[HV]],
+    ) -> None: ...
+    @overload
+    def __delitem__(self, key: Union[str, int, slice]) -> None: ...
+    @overload
+    def __delitem__(self, key: str, _index_operation: Literal[False]) -> None: ...
+    def remove(self, key: str) -> None: ...
+    @overload  # type: ignore
+    def pop(self, key: str, default: Optional[str] = None) -> str: ...
+    @overload
+    def pop(
+        self, key: Optional[int] = None, default: Optional[Tuple[str, str]] = None
+    ) -> Tuple[str, str]: ...
+    def popitem(self) -> Tuple[str, str]: ...
+    def __contains__(self, key: str) -> bool: ...  # type: ignore
+    def has_key(self, key: str) -> bool: ...
+    def __iter__(self) -> Iterator[Tuple[str, str]]: ...  # type: ignore
+    def add(self, _key: str, _value: HV, **kw: HV) -> None: ...
+    def _validate_value(self, value: str) -> None: ...
+    def add_header(self, _key: str, _value: HV, **_kw: HV) -> None: ...
+    def clear(self) -> None: ...
+    def set(self, _key: str, _value: HV, **kw: HV) -> None: ...
+    def setlist(self, key: str, values: Iterable[HV]) -> None: ...
+    def setdefault(self, key: str, default: HV) -> str: ...  # type: ignore
+    def setlistdefault(self, key: str, default: Iterable[HV]) -> None: ...
+    @overload
+    def __setitem__(self, key: str, value: HV) -> None: ...
+    @overload
+    def __setitem__(self, key: int, value: Tuple[str, HV]) -> None: ...
+    @overload
+    def __setitem__(self, key: slice, value: Iterable[Tuple[str, HV]]) -> None: ...
+    def update(
+        self,
+        *args: Union[Mapping[str, HV], Iterable[Tuple[str, HV]]],
+        **kwargs: Union[HV, Iterable[HV]],
+    ) -> None: ...
+    def to_wsgi_list(self) -> List[Tuple[str, str]]: ...
+    def copy(self) -> Headers: ...
+    def __copy__(self) -> Headers: ...
+
+class ImmutableHeadersMixin(Headers):
+    def __delitem__(self, key: Any, _index_operation: bool = True) -> NoReturn: ...
+    def __setitem__(self, key: Any, value: Any) -> NoReturn: ...
+    def set(self, _key: Any, _value: Any, **kw: Any) -> NoReturn: ...
+    def setlist(self, key: Any, values: Any) -> NoReturn: ...
+    def add(self, _key: Any, _value: Any, **kw: Any) -> NoReturn: ...
+    def add_header(self, _key: Any, _value: Any, **_kw: Any) -> NoReturn: ...
+    def remove(self, key: Any) -> NoReturn: ...
+    def extend(self, *args: Any, **kwargs: Any) -> NoReturn: ...
+    def update(self, *args: Any, **kwargs: Any) -> NoReturn: ...
+    def insert(self, pos: Any, value: Any) -> NoReturn: ...
+    def pop(self, key: Any = None, default: Any = ...) -> NoReturn: ...
+    def popitem(self) -> NoReturn: ...
+    def setdefault(self, key: Any, default: Any) -> NoReturn: ...  # type: ignore
+    def setlistdefault(self, key: Any, default: Any) -> NoReturn: ...
+
+class EnvironHeaders(ImmutableHeadersMixin, Headers):
+    environ: WSGIEnvironment
+    def __init__(self, environ: WSGIEnvironment) -> None: ...
+    def __eq__(self, other: object) -> bool: ...
+    def __getitem__(  # type: ignore
+        self, key: str, _get_mode: Literal[False] = False
+    ) -> str: ...
+    def __iter__(self) -> Iterator[Tuple[str, str]]: ...  # type: ignore
+    def copy(self) -> NoReturn: ...
+
+class CombinedMultiDict(ImmutableMultiDictMixin[K, V], MultiDict[K, V]):  # type: ignore
+    dicts: List[MultiDict[K, V]]
+    def __init__(self, dicts: Optional[Iterable[MultiDict[K, V]]]) -> None: ...
+    @classmethod
+    def fromkeys(cls, keys: Any, value: Any = None) -> NoReturn: ...
+    def __getitem__(self, key: K) -> V: ...
+    @overload  # type: ignore
+    def get(self, key: K) -> Optional[V]: ...
+    @overload
+    def get(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: ...
+    @overload
+    def get(
+        self, key: K, default: Optional[T] = None, type: Callable[[V], T] = ...
+    ) -> Optional[T]: ...
+    @overload
+    def getlist(self, key: K) -> List[V]: ...
+    @overload
+    def getlist(self, key: K, type: Callable[[V], T] = ...) -> List[T]: ...
+    def _keys_impl(self) -> Set[K]: ...
+    def keys(self) -> Set[K]: ...  # type: ignore
+    def __iter__(self) -> Set[K]: ...  # type: ignore
+    def items(self, multi: bool = False) -> Iterator[Tuple[K, V]]: ...  # type: ignore
+    def values(self) -> Iterator[V]: ...  # type: ignore
+    def lists(self) -> Iterator[Tuple[K, List[V]]]: ...
+    def listvalues(self) -> Iterator[List[V]]: ...
+    def copy(self) -> MultiDict[K, V]: ...
+    @overload
+    def to_dict(self) -> Dict[K, V]: ...
+    @overload
+    def to_dict(self, flat: Literal[False]) -> Dict[K, List[V]]: ...
+    def __contains__(self, key: K) -> bool: ...  # type: ignore
+    def has_key(self, key: K) -> bool: ...
+
+class FileMultiDict(MultiDict[str, "FileStorage"]):
+    def add_file(
+        self,
+        name: str,
+        file: Union[FileStorage, str, BinaryIO],
+        filename: Optional[str] = None,
+        content_type: Optional[str] = None,
+    ) -> None: ...
+
+class ImmutableDict(ImmutableDictMixin[K, V], Dict[K, V]):
+    def copy(self) -> Dict[K, V]: ...
+    def __copy__(self) -> ImmutableDict[K, V]: ...
+
+class ImmutableMultiDict(  # type: ignore
+    ImmutableMultiDictMixin[K, V], MultiDict[K, V]
+):
+    def copy(self) -> MultiDict[K, V]: ...
+    def __copy__(self) -> ImmutableMultiDict[K, V]: ...
+
+class ImmutableOrderedMultiDict(  # type: ignore
+    ImmutableMultiDictMixin[K, V], OrderedMultiDict[K, V]
+):
+    def _iter_hashitems(self) -> Iterator[Tuple[int, Tuple[K, V]]]: ...
+    def copy(self) -> OrderedMultiDict[K, V]: ...
+    def __copy__(self) -> ImmutableOrderedMultiDict[K, V]: ...
+
+class Accept(ImmutableList[Tuple[str, int]]):
+    provided: bool
+    def __init__(
+        self, values: Optional[Union[Accept, Iterable[Tuple[str, float]]]] = None
+    ) -> None: ...
+    def _specificity(self, value: str) -> Tuple[bool, ...]: ...
+    def _value_matches(self, value: str, item: str) -> bool: ...
+    @overload  # type: ignore
+    def __getitem__(self, key: str) -> int: ...
+    @overload
+    def __getitem__(self, key: int) -> Tuple[str, int]: ...
+    @overload
+    def __getitem__(self, key: slice) -> Iterable[Tuple[str, int]]: ...
+    def quality(self, key: str) -> int: ...
+    def __contains__(self, value: str) -> bool: ...  # type: ignore
+    def index(self, key: str) -> int: ...  # type: ignore
+    def find(self, key: str) -> int: ...
+    def values(self) -> Iterator[str]: ...
+    def to_header(self) -> str: ...
+    def _best_single_match(self, match: str) -> Optional[Tuple[str, int]]: ...
+    def best_match(
+        self, matches: Iterable[str], default: Optional[str] = None
+    ) -> Optional[str]: ...
+    @property
+    def best(self) -> str: ...
+
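+# Illustrative sketch (assumes the runtime Accept API):
+#
+#     a = Accept([("text/html", 1), ("application/json", 0.8)])
+#     a.best_match(["application/json", "text/plain"])  # -> "application/json"
+#     a.quality("text/html")                            # -> 1
+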
+def _normalize_mime(value: str) -> List[str]: ...
+
+class MIMEAccept(Accept):
+    def _specificity(self, value: str) -> Tuple[bool, ...]: ...
+    def _value_matches(self, value: str, item: str) -> bool: ...
+    @property
+    def accept_html(self) -> bool: ...
+    @property
+    def accept_xhtml(self) -> bool: ...
+    @property
+    def accept_json(self) -> bool: ...
+
+def _normalize_lang(value: str) -> List[str]: ...
+
+class LanguageAccept(Accept):
+    def _value_matches(self, value: str, item: str) -> bool: ...
+    def best_match(
+        self, matches: Iterable[str], default: Optional[str] = None
+    ) -> Optional[str]: ...
+
+class CharsetAccept(Accept):
+    def _value_matches(self, value: str, item: str) -> bool: ...
+
+_CPT = TypeVar("_CPT", str, int, bool)
+_OptCPT = Optional[_CPT]
+
+def cache_property(key: str, empty: _OptCPT, type: Type[_CPT]) -> property: ...
+
+class _CacheControl(UpdateDictMixin[str, _OptCPT], Dict[str, _OptCPT]):
+    provided: bool
+    def __init__(
+        self,
+        values: Union[Mapping[str, _OptCPT], Iterable[Tuple[str, _OptCPT]]] = (),
+        on_update: Optional[Callable[[_CacheControl], None]] = None,
+    ) -> None: ...
+    @property
+    def no_cache(self) -> Optional[bool]: ...
+    @no_cache.setter
+    def no_cache(self, value: Optional[bool]) -> None: ...
+    @no_cache.deleter
+    def no_cache(self) -> None: ...
+    @property
+    def no_store(self) -> Optional[bool]: ...
+    @no_store.setter
+    def no_store(self, value: Optional[bool]) -> None: ...
+    @no_store.deleter
+    def no_store(self) -> None: ...
+    @property
+    def max_age(self) -> Optional[int]: ...
+    @max_age.setter
+    def max_age(self, value: Optional[int]) -> None: ...
+    @max_age.deleter
+    def max_age(self) -> None: ...
+    @property
+    def no_transform(self) -> Optional[bool]: ...
+    @no_transform.setter
+    def no_transform(self, value: Optional[bool]) -> None: ...
+    @no_transform.deleter
+    def no_transform(self) -> None: ...
+    def _get_cache_value(self, key: str, empty: Optional[T], type: Type[T]) -> T: ...
+    def _set_cache_value(self, key: str, value: Optional[T], type: Type[T]) -> None: ...
+    def _del_cache_value(self, key: str) -> None: ...
+    def to_header(self) -> str: ...
+    @staticmethod
+    def cache_property(key: str, empty: _OptCPT, type: Type[_CPT]) -> property: ...
+
+class RequestCacheControl(ImmutableDictMixin[str, _OptCPT], _CacheControl):
+    @property
+    def max_stale(self) -> Optional[int]: ...
+    @max_stale.setter
+    def max_stale(self, value: Optional[int]) -> None: ...
+    @max_stale.deleter
+    def max_stale(self) -> None: ...
+    @property
+    def min_fresh(self) -> Optional[int]: ...
+    @min_fresh.setter
+    def min_fresh(self, value: Optional[int]) -> None: ...
+    @min_fresh.deleter
+    def min_fresh(self) -> None: ...
+    @property
+    def only_if_cached(self) -> Optional[bool]: ...
+    @only_if_cached.setter
+    def only_if_cached(self, value: Optional[bool]) -> None: ...
+    @only_if_cached.deleter
+    def only_if_cached(self) -> None: ...
+
+class ResponseCacheControl(_CacheControl):
+    @property
+    def public(self) -> Optional[bool]: ...
+    @public.setter
+    def public(self, value: Optional[bool]) -> None: ...
+    @public.deleter
+    def public(self) -> None: ...
+    @property
+    def private(self) -> Optional[bool]: ...
+    @private.setter
+    def private(self, value: Optional[bool]) -> None: ...
+    @private.deleter
+    def private(self) -> None: ...
+    @property
+    def must_revalidate(self) -> Optional[bool]: ...
+    @must_revalidate.setter
+    def must_revalidate(self, value: Optional[bool]) -> None: ...
+    @must_revalidate.deleter
+    def must_revalidate(self) -> None: ...
+    @property
+    def proxy_revalidate(self) -> Optional[bool]: ...
+    @proxy_revalidate.setter
+    def proxy_revalidate(self, value: Optional[bool]) -> None: ...
+    @proxy_revalidate.deleter
+    def proxy_revalidate(self) -> None: ...
+    @property
+    def s_maxage(self) -> Optional[int]: ...
+    @s_maxage.setter
+    def s_maxage(self, value: Optional[int]) -> None: ...
+    @s_maxage.deleter
+    def s_maxage(self) -> None: ...
+    @property
+    def immutable(self) -> Optional[bool]: ...
+    @immutable.setter
+    def immutable(self, value: Optional[bool]) -> None: ...
+    @immutable.deleter
+    def immutable(self) -> None: ...
+
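+# Illustrative sketch (assumes the runtime classes behave as typed above):
+#
+#     cc = ResponseCacheControl()
+#     cc.public = True
+#     cc.max_age = 3600
+#     cc.to_header()   # e.g. "public, max-age=3600" (order may vary)
+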
+def csp_property(key: str) -> property: ...
+
+class ContentSecurityPolicy(UpdateDictMixin[str, str], Dict[str, str]):
+    @property
+    def base_uri(self) -> Optional[str]: ...
+    @base_uri.setter
+    def base_uri(self, value: Optional[str]) -> None: ...
+    @base_uri.deleter
+    def base_uri(self) -> None: ...
+    @property
+    def child_src(self) -> Optional[str]: ...
+    @child_src.setter
+    def child_src(self, value: Optional[str]) -> None: ...
+    @child_src.deleter
+    def child_src(self) -> None: ...
+    @property
+    def connect_src(self) -> Optional[str]: ...
+    @connect_src.setter
+    def connect_src(self, value: Optional[str]) -> None: ...
+    @connect_src.deleter
+    def connect_src(self) -> None: ...
+    @property
+    def default_src(self) -> Optional[str]: ...
+    @default_src.setter
+    def default_src(self, value: Optional[str]) -> None: ...
+    @default_src.deleter
+    def default_src(self) -> None: ...
+    @property
+    def font_src(self) -> Optional[str]: ...
+    @font_src.setter
+    def font_src(self, value: Optional[str]) -> None: ...
+    @font_src.deleter
+    def font_src(self) -> None: ...
+    @property
+    def form_action(self) -> Optional[str]: ...
+    @form_action.setter
+    def form_action(self, value: Optional[str]) -> None: ...
+    @form_action.deleter
+    def form_action(self) -> None: ...
+    @property
+    def frame_ancestors(self) -> Optional[str]: ...
+    @frame_ancestors.setter
+    def frame_ancestors(self, value: Optional[str]) -> None: ...
+    @frame_ancestors.deleter
+    def frame_ancestors(self) -> None: ...
+    @property
+    def frame_src(self) -> Optional[str]: ...
+    @frame_src.setter
+    def frame_src(self, value: Optional[str]) -> None: ...
+    @frame_src.deleter
+    def frame_src(self) -> None: ...
+    @property
+    def img_src(self) -> Optional[str]: ...
+    @img_src.setter
+    def img_src(self, value: Optional[str]) -> None: ...
+    @img_src.deleter
+    def img_src(self) -> None: ...
+    @property
+    def manifest_src(self) -> Optional[str]: ...
+    @manifest_src.setter
+    def manifest_src(self, value: Optional[str]) -> None: ...
+    @manifest_src.deleter
+    def manifest_src(self) -> None: ...
+    @property
+    def media_src(self) -> Optional[str]: ...
+    @media_src.setter
+    def media_src(self, value: Optional[str]) -> None: ...
+    @media_src.deleter
+    def media_src(self) -> None: ...
+    @property
+    def navigate_to(self) -> Optional[str]: ...
+    @navigate_to.setter
+    def navigate_to(self, value: Optional[str]) -> None: ...
+    @navigate_to.deleter
+    def navigate_to(self) -> None: ...
+    @property
+    def object_src(self) -> Optional[str]: ...
+    @object_src.setter
+    def object_src(self, value: Optional[str]) -> None: ...
+    @object_src.deleter
+    def object_src(self) -> None: ...
+    @property
+    def prefetch_src(self) -> Optional[str]: ...
+    @prefetch_src.setter
+    def prefetch_src(self, value: Optional[str]) -> None: ...
+    @prefetch_src.deleter
+    def prefetch_src(self) -> None: ...
+    @property
+    def plugin_types(self) -> Optional[str]: ...
+    @plugin_types.setter
+    def plugin_types(self, value: Optional[str]) -> None: ...
+    @plugin_types.deleter
+    def plugin_types(self) -> None: ...
+    @property
+    def report_to(self) -> Optional[str]: ...
+    @report_to.setter
+    def report_to(self, value: Optional[str]) -> None: ...
+    @report_to.deleter
+    def report_to(self) -> None: ...
+    @property
+    def report_uri(self) -> Optional[str]: ...
+    @report_uri.setter
+    def report_uri(self, value: Optional[str]) -> None: ...
+    @report_uri.deleter
+    def report_uri(self) -> None: ...
+    @property
+    def sandbox(self) -> Optional[str]: ...
+    @sandbox.setter
+    def sandbox(self, value: Optional[str]) -> None: ...
+    @sandbox.deleter
+    def sandbox(self) -> None: ...
+    @property
+    def script_src(self) -> Optional[str]: ...
+    @script_src.setter
+    def script_src(self, value: Optional[str]) -> None: ...
+    @script_src.deleter
+    def script_src(self) -> None: ...
+    @property
+    def script_src_attr(self) -> Optional[str]: ...
+    @script_src_attr.setter
+    def script_src_attr(self, value: Optional[str]) -> None: ...
+    @script_src_attr.deleter
+    def script_src_attr(self) -> None: ...
+    @property
+    def script_src_elem(self) -> Optional[str]: ...
+    @script_src_elem.setter
+    def script_src_elem(self, value: Optional[str]) -> None: ...
+    @script_src_elem.deleter
+    def script_src_elem(self) -> None: ...
+    @property
+    def style_src(self) -> Optional[str]: ...
+    @style_src.setter
+    def style_src(self, value: Optional[str]) -> None: ...
+    @style_src.deleter
+    def style_src(self) -> None: ...
+    @property
+    def style_src_attr(self) -> Optional[str]: ...
+    @style_src_attr.setter
+    def style_src_attr(self, value: Optional[str]) -> None: ...
+    @style_src_attr.deleter
+    def style_src_attr(self) -> None: ...
+    @property
+    def style_src_elem(self) -> Optional[str]: ...
+    @style_src_elem.setter
+    def style_src_elem(self, value: Optional[str]) -> None: ...
+    @style_src_elem.deleter
+    def style_src_elem(self) -> None: ...
+    @property
+    def worker_src(self) -> Optional[str]: ...
+    @worker_src.setter
+    def worker_src(self, value: Optional[str]) -> None: ...
+    @worker_src.deleter
+    def worker_src(self) -> None: ...
+    provided: bool
+    def __init__(
+        self,
+        values: Union[Mapping[str, str], Iterable[Tuple[str, str]]] = (),
+        on_update: Optional[Callable[[ContentSecurityPolicy], None]] = None,
+    ) -> None: ...
+    def _get_value(self, key: str) -> Optional[str]: ...
+    def _set_value(self, key: str, value: str) -> None: ...
+    def _del_value(self, key: str) -> None: ...
+    def to_header(self) -> str: ...
+
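+# Illustrative sketch (assumes the runtime class behaves as typed above):
+#
+#     csp = ContentSecurityPolicy()
+#     csp.default_src = "'self'"
+#     csp.img_src = "*"
+#     csp.to_header()   # e.g. "default-src 'self'; img-src *"
+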
+class CallbackDict(UpdateDictMixin[K, V], Dict[K, V]):
+    def __init__(
+        self,
+        initial: Optional[Union[Mapping[K, V], Iterable[Tuple[K, V]]]] = None,
+        on_update: Optional[Callable[[CallbackDict], None]] = None,
+    ) -> None: ...
+
+class HeaderSet(Set[str]):
+    _headers: List[str]
+    _set: Set[str]
+    on_update: Optional[Callable[[HeaderSet], None]]
+    def __init__(
+        self,
+        headers: Optional[Iterable[str]] = None,
+        on_update: Optional[Callable[[HeaderSet], None]] = None,
+    ) -> None: ...
+    def add(self, header: str) -> None: ...
+    def remove(self, header: str) -> None: ...
+    def update(self, iterable: Iterable[str]) -> None: ...  # type: ignore
+    def discard(self, header: str) -> None: ...
+    def find(self, header: str) -> int: ...
+    def index(self, header: str) -> int: ...
+    def clear(self) -> None: ...
+    def as_set(self, preserve_casing: bool = False) -> Set[str]: ...
+    def to_header(self) -> str: ...
+    def __getitem__(self, idx: int) -> str: ...
+    def __delitem__(self, idx: int) -> None: ...
+    def __setitem__(self, idx: int, value: str) -> None: ...
+    def __contains__(self, header: str) -> bool: ...  # type: ignore
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[str]: ...
+
+class ETags(Collection[str]):
+    _strong: FrozenSet[str]
+    _weak: FrozenSet[str]
+    star_tag: bool
+    def __init__(
+        self,
+        strong_etags: Optional[Iterable[str]] = None,
+        weak_etags: Optional[Iterable[str]] = None,
+        star_tag: bool = False,
+    ) -> None: ...
+    def as_set(self, include_weak: bool = False) -> Set[str]: ...
+    def is_weak(self, etag: str) -> bool: ...
+    def is_strong(self, etag: str) -> bool: ...
+    def contains_weak(self, etag: str) -> bool: ...
+    def contains(self, etag: str) -> bool: ...
+    def contains_raw(self, etag: str) -> bool: ...
+    def to_header(self) -> str: ...
+    def __call__(
+        self,
+        etag: Optional[str] = None,
+        data: Optional[bytes] = None,
+        include_weak: bool = False,
+    ) -> bool: ...
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def __contains__(self, item: str) -> bool: ...  # type: ignore
+
+class IfRange:
+    etag: Optional[str]
+    date: Optional[datetime]
+    def __init__(
+        self, etag: Optional[str] = None, date: Optional[datetime] = None
+    ) -> None: ...
+    def to_header(self) -> str: ...
+
+class Range:
+    units: str
+    ranges: List[Tuple[int, Optional[int]]]
+    def __init__(self, units: str, ranges: List[Tuple[int, Optional[int]]]) -> None: ...
+    def range_for_length(self, length: Optional[int]) -> Optional[Tuple[int, int]]: ...
+    def make_content_range(self, length: Optional[int]) -> Optional[ContentRange]: ...
+    def to_header(self) -> str: ...
+    def to_content_range_header(self, length: Optional[int]) -> Optional[str]: ...
+
+def _callback_property(name: str) -> property: ...
+
+class ContentRange:
+    on_update: Optional[Callable[[ContentRange], None]]
+    def __init__(
+        self,
+        units: Optional[str],
+        start: Optional[int],
+        stop: Optional[int],
+        length: Optional[int] = None,
+        on_update: Optional[Callable[[ContentRange], None]] = None,
+    ) -> None: ...
+    @property
+    def units(self) -> Optional[str]: ...
+    @units.setter
+    def units(self, value: Optional[str]) -> None: ...
+    @property
+    def start(self) -> Optional[int]: ...
+    @start.setter
+    def start(self, value: Optional[int]) -> None: ...
+    @property
+    def stop(self) -> Optional[int]: ...
+    @stop.setter
+    def stop(self, value: Optional[int]) -> None: ...
+    @property
+    def length(self) -> Optional[int]: ...
+    @length.setter
+    def length(self, value: Optional[int]) -> None: ...
+    def set(
+        self,
+        start: Optional[int],
+        stop: Optional[int],
+        length: Optional[int] = None,
+        units: Optional[str] = "bytes",
+    ) -> None: ...
+    def unset(self) -> None: ...
+    def to_header(self) -> str: ...
+
+class Authorization(ImmutableDictMixin[str, str], Dict[str, str]):
+    type: str
+    def __init__(
+        self,
+        auth_type: str,
+        data: Optional[Union[Mapping[str, str], Iterable[Tuple[str, str]]]] = None,
+    ) -> None: ...
+    @property
+    def username(self) -> Optional[str]: ...
+    @property
+    def password(self) -> Optional[str]: ...
+    @property
+    def realm(self) -> Optional[str]: ...
+    @property
+    def nonce(self) -> Optional[str]: ...
+    @property
+    def uri(self) -> Optional[str]: ...
+    @property
+    def nc(self) -> Optional[str]: ...
+    @property
+    def cnonce(self) -> Optional[str]: ...
+    @property
+    def response(self) -> Optional[str]: ...
+    @property
+    def opaque(self) -> Optional[str]: ...
+    @property
+    def qop(self) -> Optional[str]: ...
+    def to_header(self) -> str: ...
+
+def auth_property(name: str, doc: Optional[str] = None) -> property: ...
+def _set_property(name: str, doc: Optional[str] = None) -> property: ...
+
+class WWWAuthenticate(UpdateDictMixin[str, str], Dict[str, str]):
+    _require_quoting: FrozenSet[str]
+    def __init__(
+        self,
+        auth_type: Optional[str] = None,
+        values: Optional[Union[Mapping[str, str], Iterable[Tuple[str, str]]]] = None,
+        on_update: Optional[Callable[[WWWAuthenticate], None]] = None,
+    ) -> None: ...
+    def set_basic(self, realm: str = ...) -> None: ...
+    def set_digest(
+        self,
+        realm: str,
+        nonce: str,
+        qop: Iterable[str] = ("auth",),
+        opaque: Optional[str] = None,
+        algorithm: Optional[str] = None,
+        stale: bool = False,
+    ) -> None: ...
+    def to_header(self) -> str: ...
+    @property
+    def type(self) -> Optional[str]: ...
+    @type.setter
+    def type(self, value: Optional[str]) -> None: ...
+    @property
+    def realm(self) -> Optional[str]: ...
+    @realm.setter
+    def realm(self, value: Optional[str]) -> None: ...
+    @property
+    def domain(self) -> HeaderSet: ...
+    @property
+    def nonce(self) -> Optional[str]: ...
+    @nonce.setter
+    def nonce(self, value: Optional[str]) -> None: ...
+    @property
+    def opaque(self) -> Optional[str]: ...
+    @opaque.setter
+    def opaque(self, value: Optional[str]) -> None: ...
+    @property
+    def algorithm(self) -> Optional[str]: ...
+    @algorithm.setter
+    def algorithm(self, value: Optional[str]) -> None: ...
+    @property
+    def qop(self) -> HeaderSet: ...
+    @property
+    def stale(self) -> Optional[bool]: ...
+    @stale.setter
+    def stale(self, value: Optional[bool]) -> None: ...
+    @staticmethod
+    def auth_property(name: str, doc: Optional[str] = None) -> property: ...
+
+class FileStorage:
+    name: Optional[str]
+    stream: BinaryIO
+    filename: Optional[str]
+    headers: Headers
+    _parsed_content_type: Tuple[str, Dict[str, str]]
+    def __init__(
+        self,
+        stream: Optional[BinaryIO] = None,
+        filename: Optional[str] = None,
+        name: Optional[str] = None,
+        content_type: Optional[str] = None,
+        content_length: Optional[int] = None,
+        headers: Optional[Headers] = None,
+    ) -> None: ...
+    def _parse_content_type(self) -> None: ...
+    @property
+    def content_type(self) -> str: ...
+    @property
+    def content_length(self) -> int: ...
+    @property
+    def mimetype(self) -> str: ...
+    @property
+    def mimetype_params(self) -> Dict[str, str]: ...
+    def save(
+        self, dst: Union[str, PathLike, BinaryIO], buffer_size: int = ...
+    ) -> None: ...
+    def close(self) -> None: ...
+    def __iter__(self) -> Iterator[bytes]: ...
diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/__init__.py b/venv/lib/python3.7/site-packages/werkzeug/debug/__init__.py
new file mode 100644
index 00000000..ca46bb92
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/debug/__init__.py
@@ -0,0 +1,501 @@
+import getpass
+import hashlib
+import json
+import mimetypes
+import os
+import pkgutil
+import re
+import sys
+import time
+import typing as t
+import uuid
+from itertools import chain
+from os.path import basename
+from os.path import join
+
+from .._internal import _log
+from ..http import parse_cookie
+from ..security import gen_salt
+from ..wrappers.request import Request
+from ..wrappers.response import Response
+from .console import Console
+from .tbtools import Frame
+from .tbtools import get_current_traceback
+from .tbtools import render_console_html
+from .tbtools import Traceback
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+# A week
+PIN_TIME = 60 * 60 * 24 * 7
+
+
+def hash_pin(pin: str) -> str:
+    return hashlib.sha1(f"{pin} added salt".encode("utf-8", "replace")).hexdigest()[:12]
+
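+# Illustrative only: pin_auth() below stores a cookie whose value is
+# f"{int(time.time())}|{hash_pin(pin)}", and check_pin_trust() accepts it
+# while the timestamp is younger than PIN_TIME (one week).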
+
+_machine_id: t.Optional[t.Union[str, bytes]] = None
+
+
+def get_machine_id() -> t.Optional[t.Union[str, bytes]]:
+    global _machine_id
+
+    if _machine_id is not None:
+        return _machine_id
+
+    def _generate() -> t.Optional[t.Union[str, bytes]]:
+        linux = b""
+
+        # machine-id is stable across boots, boot_id is not.
+        for filename in "/etc/machine-id", "/proc/sys/kernel/random/boot_id":
+            try:
+                with open(filename, "rb") as f:
+                    value = f.readline().strip()
+            except OSError:
+                continue
+
+            if value:
+                linux += value
+                break
+
+        # Containers share the same machine id, so add some cgroup
+        # information.  This is used outside containers too, but should be
+        # relatively stable across boots.

+        try:
+            with open("/proc/self/cgroup", "rb") as f:
+                linux += f.readline().strip().rpartition(b"/")[2]
+        except OSError:
+            pass
+
+        if linux:
+            return linux
+
+        # On OS X, use ioreg to get the computer's serial number.
+        try:
+            # subprocess may not be available, e.g. Google App Engine
+            # https://github.com/pallets/werkzeug/issues/925
+            from subprocess import Popen, PIPE
+
+            dump = Popen(
+                ["ioreg", "-c", "IOPlatformExpertDevice", "-d", "2"], stdout=PIPE
+            ).communicate()[0]
+            match = re.search(b'"serial-number" = <([^>]+)', dump)
+
+            if match is not None:
+                return match.group(1)
+        except (OSError, ImportError):
+            pass
+
+        # On Windows, use winreg to get the machine guid.
+        try:
+            import winreg
+        except ImportError:
+            pass
+        else:
+            try:
+                with winreg.OpenKey(
+                    winreg.HKEY_LOCAL_MACHINE,
+                    "SOFTWARE\\Microsoft\\Cryptography",
+                    0,
+                    winreg.KEY_READ | winreg.KEY_WOW64_64KEY,
+                ) as rk:
+                    guid: t.Union[str, bytes]
+                    guid_type: int
+                    guid, guid_type = winreg.QueryValueEx(rk, "MachineGuid")
+
+                    if guid_type == winreg.REG_SZ:
+                        return guid.encode("utf-8")  # type: ignore
+
+                    return guid
+            except OSError:
+                pass
+
+        return None
+
+    _machine_id = _generate()
+    return _machine_id
+
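+# Illustrative summary of _generate() above: on Linux this is typically the
+# contents of /etc/machine-id (plus a cgroup suffix), on macOS the hardware
+# serial number, and on Windows the registry MachineGuid; None if none of
+# these are readable.
+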
+
+class _ConsoleFrame:
+    """Helper class so that we can reuse the frame console code for the
+    standalone console.
+    """
+
+    def __init__(self, namespace: t.Dict[str, t.Any]):
+        self.console = Console(namespace)
+        self.id = 0
+
+
+def get_pin_and_cookie_name(
+    app: "WSGIApplication",
+) -> t.Union[t.Tuple[str, str], t.Tuple[None, None]]:
+    """Given an application object this returns a semi-stable 9 digit pin
+    code and a random key.  The hope is that this is stable between
+    restarts to not make debugging particularly frustrating.  If the pin
+    was forcefully disabled this returns `None`.
+
+    Second item in the resulting tuple is the cookie name for remembering.
+    """
+    pin = os.environ.get("WERKZEUG_DEBUG_PIN")
+    rv = None
+    num = None
+
+    # Pin was explicitly disabled
+    if pin == "off":
+        return None, None
+
+    # Pin was provided explicitly
+    if pin is not None and pin.replace("-", "").isdigit():
+        # If there are separators in the pin, return it directly
+        if "-" in pin:
+            rv = pin
+        else:
+            num = pin
+
+    modname = getattr(app, "__module__", t.cast(object, app).__class__.__module__)
+    username: t.Optional[str]
+
+    try:
+        # getuser imports the pwd module, which does not exist in Google
+        # App Engine. It may also raise a KeyError if the UID does not
+        # have a username, such as in Docker.
+        username = getpass.getuser()
+    except (ImportError, KeyError):
+        username = None
+
+    mod = sys.modules.get(modname)
+
+    # This information only exists to make the cookie unique on the
+    # computer, not as a security feature.
+    probably_public_bits = [
+        username,
+        modname,
+        getattr(app, "__name__", type(app).__name__),
+        getattr(mod, "__file__", None),
+    ]
+
+    # This information is here to make it harder for an attacker to
+    # guess the cookie name.  They are unlikely to be contained anywhere
+    # within the unauthenticated debug page.
+    private_bits = [str(uuid.getnode()), get_machine_id()]
+
+    h = hashlib.sha1()
+    for bit in chain(probably_public_bits, private_bits):
+        if not bit:
+            continue
+        if isinstance(bit, str):
+            bit = bit.encode("utf-8")
+        h.update(bit)
+    h.update(b"cookiesalt")
+
+    cookie_name = f"__wzd{h.hexdigest()[:20]}"
+
+    # If we need to generate a pin we salt it a bit more so that we don't
+    # end up with the same value, and generate 9 digits from it.
+    if num is None:
+        h.update(b"pinsalt")
+        num = f"{int(h.hexdigest(), 16):09d}"[:9]
+
+    # Format the pincode in groups of digits for easier remembering if
+    # we don't have a result yet.
+    if rv is None:
+        for group_size in 5, 4, 3:
+            if len(num) % group_size == 0:
+                rv = "-".join(
+                    num[x : x + group_size].rjust(group_size, "0")
+                    for x in range(0, len(num), group_size)
+                )
+                break
+        else:
+            rv = num
+
+    return rv, cookie_name
+
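+# Illustrative usage (hypothetical values):
+#
+#     pin, cookie_name = get_pin_and_cookie_name(app)
+#     # pin          -> e.g. "123-456-789" (or None when disabled)
+#     # cookie_name  -> "__wzd" + 20 hex chars derived from the host/app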
+
+class DebuggedApplication:
+    """Enables debugging support for a given application::
+
+        from werkzeug.debug import DebuggedApplication
+        from myapp import app
+        app = DebuggedApplication(app, evalex=True)
+
+    The `evalex` keyword argument allows evaluating expressions in a
+    traceback's frame context.
+
+    :param app: the WSGI application to run debugged.
+    :param evalex: enable exception evaluation feature (interactive
+                   debugging).  This requires a non-forking server.
+    :param request_key: The key that points to the request object in the
+                        environment.  This parameter is ignored in current
+                        versions.
+    :param console_path: the URL for a general purpose console.
+    :param console_init_func: the function that is executed before starting
+                              the general purpose console.  The return value
+                              is used as initial namespace.
+    :param show_hidden_frames: by default hidden traceback frames are skipped.
+                               You can show them by setting this parameter
+                               to `True`.
+    :param pin_security: can be used to disable the pin based security system.
+    :param pin_logging: enables the logging of the pin system.
+    """
+
+    _pin: str
+    _pin_cookie: str
+
+    def __init__(
+        self,
+        app: "WSGIApplication",
+        evalex: bool = False,
+        request_key: str = "werkzeug.request",
+        console_path: str = "/console",
+        console_init_func: t.Optional[t.Callable[[], t.Dict[str, t.Any]]] = None,
+        show_hidden_frames: bool = False,
+        pin_security: bool = True,
+        pin_logging: bool = True,
+    ) -> None:
+        if not console_init_func:
+            # Normalize any falsy value to None.
+            console_init_func = None
+        self.app = app
+        self.evalex = evalex
+        self.frames: t.Dict[int, t.Union[Frame, _ConsoleFrame]] = {}
+        self.tracebacks: t.Dict[int, Traceback] = {}
+        self.request_key = request_key
+        self.console_path = console_path
+        self.console_init_func = console_init_func
+        self.show_hidden_frames = show_hidden_frames
+        self.secret = gen_salt(20)
+        self._failed_pin_auth = 0
+
+        self.pin_logging = pin_logging
+        if pin_security:
+            # Print out the pin for the debugger on standard out.
+            if os.environ.get("WERKZEUG_RUN_MAIN") == "true" and pin_logging:
+                _log("warning", " * Debugger is active!")
+                if self.pin is None:
+                    _log("warning", " * Debugger PIN disabled. DEBUGGER UNSECURED!")
+                else:
+                    _log("info", " * Debugger PIN: %s", self.pin)
+        else:
+            self.pin = None
+
+    @property
+    def pin(self) -> t.Optional[str]:
+        if not hasattr(self, "_pin"):
+            pin_cookie = get_pin_and_cookie_name(self.app)
+            self._pin, self._pin_cookie = pin_cookie  # type: ignore
+        return self._pin
+
+    @pin.setter
+    def pin(self, value: str) -> None:
+        self._pin = value
+
+    @property
+    def pin_cookie_name(self) -> str:
+        """The name of the pin cookie."""
+        if not hasattr(self, "_pin_cookie"):
+            pin_cookie = get_pin_and_cookie_name(self.app)
+            self._pin, self._pin_cookie = pin_cookie  # type: ignore
+        return self._pin_cookie
+
+    def debug_application(
+        self, environ: "WSGIEnvironment", start_response: "StartResponse"
+    ) -> t.Iterator[bytes]:
+        """Run the application and conserve the traceback frames."""
+        app_iter = None
+        try:
+            app_iter = self.app(environ, start_response)
+            yield from app_iter
+            if hasattr(app_iter, "close"):
+                app_iter.close()  # type: ignore
+        except Exception:
+            if hasattr(app_iter, "close"):
+                app_iter.close()  # type: ignore
+            traceback = get_current_traceback(
+                skip=1,
+                show_hidden_frames=self.show_hidden_frames,
+                ignore_system_exceptions=True,
+            )
+            for frame in traceback.frames:
+                self.frames[frame.id] = frame
+            self.tracebacks[traceback.id] = traceback
+
+            try:
+                start_response(
+                    "500 INTERNAL SERVER ERROR",
+                    [
+                        ("Content-Type", "text/html; charset=utf-8"),
+                        # Disable Chrome's XSS protection, the debug
+                        # output can cause false-positives.
+                        ("X-XSS-Protection", "0"),
+                    ],
+                )
+            except Exception:
+                # If we end up here there has been output, but an error
+                # occurred.  In that situation we can't do anything fancy
+                # anymore; better to log something into the error log and
+                # fall back gracefully.
+                environ["wsgi.errors"].write(
+                    "Debugging middleware caught exception in streamed "
+                    "response at a point where response headers were already "
+                    "sent.\n"
+                )
+            else:
+                is_trusted = bool(self.check_pin_trust(environ))
+                yield traceback.render_full(
+                    evalex=self.evalex, evalex_trusted=is_trusted, secret=self.secret
+                ).encode("utf-8", "replace")
+
+            traceback.log(environ["wsgi.errors"])
+
+    def execute_command(
+        self, request: Request, command: str, frame: t.Union[Frame, _ConsoleFrame]
+    ) -> Response:
+        """Execute a command in a console."""
+        return Response(frame.console.eval(command), mimetype="text/html")
+
+    def display_console(self, request: Request) -> Response:
+        """Display a standalone shell."""
+        if 0 not in self.frames:
+            if self.console_init_func is None:
+                ns = {}
+            else:
+                ns = dict(self.console_init_func())
+            ns.setdefault("app", self.app)
+            self.frames[0] = _ConsoleFrame(ns)
+        is_trusted = bool(self.check_pin_trust(request.environ))
+        return Response(
+            render_console_html(secret=self.secret, evalex_trusted=is_trusted),
+            mimetype="text/html",
+        )
+
+    def get_resource(self, request: Request, filename: str) -> Response:
+        """Return a static resource from the shared folder."""
+        filename = join("shared", basename(filename))
+        try:
+            data = pkgutil.get_data(__package__, filename)
+        except OSError:
+            data = None
+        if data is not None:
+            mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
+            return Response(data, mimetype=mimetype)
+        return Response("Not Found", status=404)
+
+    def check_pin_trust(self, environ: "WSGIEnvironment") -> t.Optional[bool]:
+        """Checks if the request passed the pin test.  This returns `True` if the
+        request is trusted on a pin/cookie basis and returns `False` if not.
+        Additionally if the cookie's stored pin hash is wrong it will return
+        `None` so that appropriate action can be taken.
+        """
+        if self.pin is None:
+            return True
+        val = parse_cookie(environ).get(self.pin_cookie_name)
+        if not val or "|" not in val:
+            return False
+        ts, pin_hash = val.split("|", 1)
+        if not ts.isdigit():
+            return False
+        if pin_hash != hash_pin(self.pin):
+            return None
+        return (time.time() - PIN_TIME) < int(ts)
+
+    def _fail_pin_auth(self) -> None:
+        # Throttle brute forcing: back off harder after repeated failures.
+        time.sleep(5.0 if self._failed_pin_auth > 5 else 0.5)
+        self._failed_pin_auth += 1
+
+    def pin_auth(self, request: Request) -> Response:
+        """Authenticates with the pin."""
+        exhausted = False
+        auth = False
+        trust = self.check_pin_trust(request.environ)
+        pin = t.cast(str, self.pin)
+
+        # If the trust return value is `None` it means that the cookie is
+        # set but the stored pin hash value is bad.  This means that the
+        # pin was changed.  In this case we count a bad auth and unset the
+        # cookie.  This way it becomes harder to guess the cookie name
+        # instead of the pin as we still count up failures.
+        bad_cookie = False
+        if trust is None:
+            self._fail_pin_auth()
+            bad_cookie = True
+
+        # If we're trusted, we're authenticated.
+        elif trust:
+            auth = True
+
+        # If we failed too many times, then we're locked out.
+        elif self._failed_pin_auth > 10:
+            exhausted = True
+
+        # Otherwise go through pin based authentication
+        else:
+            entered_pin = request.args["pin"]
+
+            if entered_pin.strip().replace("-", "") == pin.replace("-", ""):
+                self._failed_pin_auth = 0
+                auth = True
+            else:
+                self._fail_pin_auth()
+
+        rv = Response(
+            json.dumps({"auth": auth, "exhausted": exhausted}),
+            mimetype="application/json",
+        )
+        if auth:
+            rv.set_cookie(
+                self.pin_cookie_name,
+                f"{int(time.time())}|{hash_pin(pin)}",
+                httponly=True,
+                samesite="None",
+            )
+        elif bad_cookie:
+            rv.delete_cookie(self.pin_cookie_name)
+        return rv
+
+    def log_pin_request(self) -> Response:
+        """Log the pin if needed."""
+        if self.pin_logging and self.pin is not None:
+            _log(
+                "info", " * To enable the debugger you need to enter the security pin:"
+            )
+            _log("info", " * Debugger pin code: %s", self.pin)
+        return Response("")
+
+    def __call__(
+        self, environ: "WSGIEnvironment", start_response: "StartResponse"
+    ) -> t.Iterable[bytes]:
+        """Dispatch the requests."""
+        # important: don't ever access a function here that reads the incoming
+        # form data!  Otherwise the application won't have access to that data
+        # any more!
+        request = Request(environ)
+        response = self.debug_application
+        if request.args.get("__debugger__") == "yes":
+            cmd = request.args.get("cmd")
+            arg = request.args.get("f")
+            secret = request.args.get("s")
+            frame = self.frames.get(request.args.get("frm", type=int))  # type: ignore
+            if cmd == "resource" and arg:
+                response = self.get_resource(request, arg)  # type: ignore
+            elif cmd == "pinauth" and secret == self.secret:
+                response = self.pin_auth(request)  # type: ignore
+            elif cmd == "printpin" and secret == self.secret:
+                response = self.log_pin_request()  # type: ignore
+            elif (
+                self.evalex
+                and cmd is not None
+                and frame is not None
+                and self.secret == secret
+                and self.check_pin_trust(environ)
+            ):
+                response = self.execute_command(request, cmd, frame)  # type: ignore
+        elif (
+            self.evalex
+            and self.console_path is not None
+            and request.path == self.console_path
+        ):
+            response = self.display_console(request)  # type: ignore
+        return response(environ, start_response)
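+
+
+# Illustrative request shapes dispatched by __call__ above (hypothetical
+# values):
+#
+#     /?__debugger__=yes&cmd=resource&f=debugger.js      -> static asset
+#     /?__debugger__=yes&cmd=pinauth&pin=...&s=<secret>  -> JSON auth result
+#     /?__debugger__=yes&cmd=printpin&s=<secret>         -> logs the pin
+#     /console (with evalex enabled)                     -> standalone shell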
diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/console.py b/venv/lib/python3.7/site-packages/werkzeug/debug/console.py
new file mode 100644
index 00000000..da786603
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/debug/console.py
@@ -0,0 +1,211 @@
+import code
+import sys
+import typing as t
+from html import escape
+from types import CodeType
+
+from ..local import Local
+from .repr import debug_repr
+from .repr import dump
+from .repr import helper
+
+if t.TYPE_CHECKING:
+    import codeop  # noqa: F401
+
+_local = Local()
+
+
+class HTMLStringO:
+    """A StringO version that HTML escapes on write."""
+
+    def __init__(self) -> None:
+        self._buffer: t.List[str] = []
+
+    def isatty(self) -> bool:
+        return False
+
+    def close(self) -> None:
+        pass
+
+    def flush(self) -> None:
+        pass
+
+    def seek(self, n: int, mode: int = 0) -> None:
+        pass
+
+    def readline(self) -> str:
+        if len(self._buffer) == 0:
+            return ""
+        ret = self._buffer[0]
+        del self._buffer[0]
+        return ret
+
+    def reset(self) -> str:
+        val = "".join(self._buffer)
+        del self._buffer[:]
+        return val
+
+    def _write(self, x: str) -> None:
+        if isinstance(x, bytes):
+            x = x.decode("utf-8", "replace")
+        self._buffer.append(x)
+
+    def write(self, x: str) -> None:
+        self._write(escape(x))
+
+    def writelines(self, x: t.Iterable[str]) -> None:
+        self._write(escape("".join(x)))
+
+
+class ThreadedStream:
+    """Thread-local wrapper for sys.stdout for the interactive console."""
+
+    @staticmethod
+    def push() -> None:
+        if not isinstance(sys.stdout, ThreadedStream):
+            sys.stdout = t.cast(t.TextIO, ThreadedStream())
+        _local.stream = HTMLStringO()
+
+    @staticmethod
+    def fetch() -> str:
+        try:
+            stream = _local.stream
+        except AttributeError:
+            return ""
+        return stream.reset()  # type: ignore
+
+    @staticmethod
+    def displayhook(obj: object) -> None:
+        try:
+            stream = _local.stream
+        except AttributeError:
+            return _displayhook(obj)  # type: ignore
+        # stream._write bypasses escaping as debug_repr is
+        # already generating HTML for us.
+        if obj is not None:
+            _local._current_ipy.locals["_"] = obj
+            stream._write(debug_repr(obj))
+
+    def __setattr__(self, name: str, value: t.Any) -> None:
+        raise AttributeError(f"read only attribute {name}")
+
+    def __dir__(self) -> t.List[str]:
+        return dir(sys.__stdout__)
+
+    def __getattribute__(self, name: str) -> t.Any:
+        try:
+            stream = _local.stream
+        except AttributeError:
+            stream = sys.__stdout__
+        return getattr(stream, name)
+
+    def __repr__(self) -> str:
+        return repr(sys.__stdout__)
+
+
+# add the threaded stream as display hook
+_displayhook = sys.displayhook
+sys.displayhook = ThreadedStream.displayhook
+
+
+class _ConsoleLoader:
+    def __init__(self) -> None:
+        self._storage: t.Dict[int, str] = {}
+
+    def register(self, code: CodeType, source: str) -> None:
+        self._storage[id(code)] = source
+        # register code objects of wrapped functions too.
+        for var in code.co_consts:
+            if isinstance(var, CodeType):
+                self._storage[id(var)] = source
+
+    def get_source_by_code(self, code: CodeType) -> t.Optional[str]:
+        try:
+            return self._storage[id(code)]
+        except KeyError:
+            return None
+
+
+class _InteractiveConsole(code.InteractiveInterpreter):
+    locals: t.Dict[str, t.Any]
+
+    def __init__(self, globals: t.Dict[str, t.Any], locals: t.Dict[str, t.Any]) -> None:
+        self.loader = _ConsoleLoader()
+        locals = {
+            **globals,
+            **locals,
+            "dump": dump,
+            "help": helper,
+            "__loader__": self.loader,
+        }
+        super().__init__(locals)
+        original_compile = self.compile
+
+        def compile(source: str, filename: str, symbol: str) -> CodeType:
+            code = original_compile(source, filename, symbol)
+            self.loader.register(code, source)
+            return code
+
+        self.compile = compile
+        self.more = False
+        self.buffer: t.List[str] = []
+
+    def runsource(self, source: str, **kwargs: t.Any) -> str:  # type: ignore
+        source = f"{source.rstrip()}\n"
+        ThreadedStream.push()
+        prompt = "... " if self.more else ">>> "
+        try:
+            source_to_eval = "".join(self.buffer + [source])
+            if super().runsource(source_to_eval, "<debugger>", "single"):
+                self.more = True
+                self.buffer.append(source)
+            else:
+                self.more = False
+                del self.buffer[:]
+        finally:
+            output = ThreadedStream.fetch()
+        return prompt + escape(source) + output
+
+    def runcode(self, code: CodeType) -> None:
+        try:
+            exec(code, self.locals)
+        except Exception:
+            self.showtraceback()
+
+    def showtraceback(self) -> None:
+        from .tbtools import get_current_traceback
+
+        tb = get_current_traceback(skip=1)
+        sys.stdout._write(tb.render_summary())  # type: ignore
+
+    def showsyntaxerror(self, filename: t.Optional[str] = None) -> None:
+        from .tbtools import get_current_traceback
+
+        tb = get_current_traceback(skip=4)
+        sys.stdout._write(tb.render_summary())  # type: ignore
+
+    def write(self, data: str) -> None:
+        sys.stdout.write(data)
+
+
+class Console:
+    """An interactive console."""
+
+    def __init__(
+        self,
+        globals: t.Optional[t.Dict[str, t.Any]] = None,
+        locals: t.Optional[t.Dict[str, t.Any]] = None,
+    ) -> None:
+        if locals is None:
+            locals = {}
+        if globals is None:
+            globals = {}
+        self._ipy = _InteractiveConsole(globals, locals)
+
+    def eval(self, code: str) -> str:
+        _local._current_ipy = self._ipy
+        old_sys_stdout = sys.stdout
+        try:
+            return self._ipy.runsource(code)
+        finally:
+            sys.stdout = old_sys_stdout
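+
+
+# Illustrative (follows from runsource above): Console.eval returns the
+# echoed prompt plus HTML-escaped source and output, e.g.
+#
+#     Console().eval("1 + 1")
+#     # -> '>>> 1 + 1\n<span class="number">2</span>'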
diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/repr.py b/venv/lib/python3.7/site-packages/werkzeug/debug/repr.py
new file mode 100644
index 00000000..7d847b03
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/debug/repr.py
@@ -0,0 +1,284 @@
+"""Object representations for debugging purposes. Unlike the default
+repr, these expose more information and produce HTML instead of ASCII.
+
+Together with the CSS and JavaScript of the debugger this gives a
+colorful and more compact output.
+"""
+import codecs
+import re
+import sys
+import typing as t
+from collections import deque
+from html import escape
+from traceback import format_exception_only
+
+missing = object()
+_paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}")
+RegexType = type(_paragraph_re)
+
+HELP_HTML = """\
+<div class=box>
+  <h3>%(title)s</h3>
+  <pre class=help>%(text)s</pre>
+</div>\
+"""
+OBJECT_DUMP_HTML = """\
+<div class=box>
+  <h3>%(title)s</h3>
+  %(repr)s
+  <table>%(items)s</table>
+</div>\
+"""
+
+
+def debug_repr(obj: object) -> str:
+    """Creates a debug repr of an object as HTML string."""
+    return DebugReprGenerator().repr(obj)
+
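+# For example (derived from DebugReprGenerator below):
+#
+#     debug_repr([1, 2, 3])
+#     # -> '[<span class="number">1</span>, <span class="number">2</span>,
+#     #      <span class="number">3</span>]'
+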
+
+def dump(obj: object = missing) -> None:
+    """Print the object details to stdout._write (for the interactive
+    console of the web debugger).
+    """
+    gen = DebugReprGenerator()
+    if obj is missing:
+        rv = gen.dump_locals(sys._getframe(1).f_locals)
+    else:
+        rv = gen.dump_object(obj)
+    sys.stdout._write(rv)  # type: ignore
+
+
+class _Helper:
+    """Displays an HTML version of the normal help, for the interactive
+    debugger only because it requires a patched sys.stdout.
+    """
+
+    def __repr__(self) -> str:
+        return "Type help(object) for help about object."
+
+    def __call__(self, topic: t.Optional[t.Any] = None) -> None:
+        if topic is None:
+            sys.stdout._write(f"<span class=help>{self!r}</span>")  # type: ignore
+            return
+        import pydoc
+
+        pydoc.help(topic)
+        rv = sys.stdout.reset()  # type: ignore
+        if isinstance(rv, bytes):
+            rv = rv.decode("utf-8", "ignore")
+        paragraphs = _paragraph_re.split(rv)
+        if len(paragraphs) > 1:
+            title = paragraphs[0]
+            text = "\n\n".join(paragraphs[1:])
+        else:
+            title = "Help"
+            text = paragraphs[0]
+        sys.stdout._write(HELP_HTML % {"title": title, "text": text})  # type: ignore
+
+
+helper = _Helper()
+
+
+def _add_subclass_info(
+    inner: str, obj: object, base: t.Union[t.Type, t.Tuple[t.Type, ...]]
+) -> str:
+    if isinstance(base, tuple):
+        for base in base:
+            if type(obj) is base:
+                return inner
+    elif type(obj) is base:
+        return inner
+    module = ""
+    if obj.__class__.__module__ not in ("__builtin__", "exceptions"):
+        module = f'<span class="module">{obj.__class__.__module__}.</span>'
+    return f"{module}{type(obj).__name__}({inner})"
+
+
+def _sequence_repr_maker(
+    left: str, right: str, base: t.Type, limit: int = 8
+) -> t.Callable[["DebugReprGenerator", t.Iterable, bool], str]:
+    def proxy(self: "DebugReprGenerator", obj: t.Iterable, recursive: bool) -> str:
+        if recursive:
+            return _add_subclass_info(f"{left}...{right}", obj, base)
+        buf = [left]
+        have_extended_section = False
+        for idx, item in enumerate(obj):
+            if idx:
+                buf.append(", ")
+            if idx == limit:
+                buf.append('<span class="extended">')
+                have_extended_section = True
+            buf.append(self.repr(item))
+        if have_extended_section:
+            buf.append("</span>")
+        buf.append(right)
+        return _add_subclass_info("".join(buf), obj, base)
+
+    return proxy
+
+
+class DebugReprGenerator:
+    def __init__(self) -> None:
+        self._stack: t.List[t.Any] = []
+
+    list_repr = _sequence_repr_maker("[", "]", list)
+    tuple_repr = _sequence_repr_maker("(", ")", tuple)
+    set_repr = _sequence_repr_maker("set([", "])", set)
+    frozenset_repr = _sequence_repr_maker("frozenset([", "])", frozenset)
+    deque_repr = _sequence_repr_maker(
+        '<span class="module">collections.</span>deque([', "])", deque
+    )
+
+    def regex_repr(self, obj: t.Pattern) -> str:
+        pattern = repr(obj.pattern)
+        pattern = codecs.decode(pattern, "unicode-escape", "ignore")  # type: ignore
+        pattern = f"r{pattern}"
+        return f're.compile(<span class="string regex">{pattern}</span>)'
+
+    def string_repr(self, obj: t.Union[str, bytes], limit: int = 70) -> str:
+        buf = ['<span class="string">']
+        r = repr(obj)
+
+        # shorten the repr when the hidden part would be at least 3 chars
+        if len(r) - limit > 2:
+            buf.extend(
+                (
+                    escape(r[:limit]),
+                    '<span class="extended">',
+                    escape(r[limit:]),
+                    "</span>",
+                )
+            )
+        else:
+            buf.append(escape(r))
+
+        buf.append("</span>")
+        out = "".join(buf)
+
+        # if the repr looks like a standard string, add subclass info if needed
+        if r[0] in "'\"" or (r[0] == "b" and r[1] in "'\""):
+            return _add_subclass_info(out, obj, (bytes, str))
+
+        # otherwise, assume the repr distinguishes the subclass already
+        return out
+
+    def dict_repr(
+        self,
+        d: t.Union[t.Dict[int, None], t.Dict[str, int], t.Dict[t.Union[str, int], int]],
+        recursive: bool,
+        limit: int = 5,
+    ) -> str:
+        if recursive:
+            return _add_subclass_info("{...}", d, dict)
+        buf = ["{"]
+        have_extended_section = False
+        for idx, (key, value) in enumerate(d.items()):
+            if idx:
+                buf.append(", ")
+            if idx == limit - 1:
+                buf.append('<span class="extended">')
+                have_extended_section = True
+            buf.append(
+                f'<span class="pair"><span class="key">{self.repr(key)}</span>:'
+                f' <span class="value">{self.repr(value)}</span></span>'
+            )
+        if have_extended_section:
+            buf.append("</span>")
+        buf.append("}")
+        return _add_subclass_info("".join(buf), d, dict)
+
+    def object_repr(
+        self, obj: t.Optional[t.Union[t.Type[dict], t.Callable, t.Type[list]]]
+    ) -> str:
+        r = repr(obj)
+        return f'<span class="object">{escape(r)}</span>'
+
+    def dispatch_repr(self, obj: t.Any, recursive: bool) -> str:
+        if obj is helper:
+            return f'<span class="help">{helper!r}</span>'
+        if isinstance(obj, (int, float, complex)):
+            return f'<span class="number">{obj!r}</span>'
+        if isinstance(obj, str) or isinstance(obj, bytes):
+            return self.string_repr(obj)
+        if isinstance(obj, RegexType):
+            return self.regex_repr(obj)
+        if isinstance(obj, list):
+            return self.list_repr(obj, recursive)
+        if isinstance(obj, tuple):
+            return self.tuple_repr(obj, recursive)
+        if isinstance(obj, set):
+            return self.set_repr(obj, recursive)
+        if isinstance(obj, frozenset):
+            return self.frozenset_repr(obj, recursive)
+        if isinstance(obj, dict):
+            return self.dict_repr(obj, recursive)
+        if isinstance(obj, deque):
+            return self.deque_repr(obj, recursive)
+        return self.object_repr(obj)
+
+    def fallback_repr(self) -> str:
+        try:
+            info = "".join(format_exception_only(*sys.exc_info()[:2]))
+        except Exception:
+            info = "?"
+        return (
+            '<span class="brokenrepr">'
+            f"&lt;broken repr ({escape(info.strip())})&gt;</span>"
+        )
+
+    def repr(self, obj: object) -> str:
+        recursive = False
+        for item in self._stack:
+            if item is obj:
+                recursive = True
+                break
+        self._stack.append(obj)
+        try:
+            try:
+                return self.dispatch_repr(obj, recursive)
+            except Exception:
+                return self.fallback_repr()
+        finally:
+            self._stack.pop()
+
+    def dump_object(self, obj: object) -> str:
+        repr = None
+        items: t.Optional[t.List[t.Tuple[str, str]]] = None
+
+        if isinstance(obj, dict):
+            title = "Contents of"
+            items = []
+            for key, value in obj.items():
+                if not isinstance(key, str):
+                    items = None
+                    break
+                items.append((key, self.repr(value)))
+        if items is None:
+            items = []
+            repr = self.repr(obj)
+            for key in dir(obj):
+                try:
+                    items.append((key, self.repr(getattr(obj, key))))
+                except Exception:
+                    pass
+            title = "Details for"
+        title += f" {object.__repr__(obj)[1:-1]}"
+        return self.render_object_dump(items, title, repr)
+
+    def dump_locals(self, d: t.Dict[str, t.Any]) -> str:
+        items = [(key, self.repr(value)) for key, value in d.items()]
+        return self.render_object_dump(items, "Local variables in frame")
+
+    def render_object_dump(
+        self, items: t.List[t.Tuple[str, str]], title: str, repr: t.Optional[str] = None
+    ) -> str:
+        html_items = []
+        for key, value in items:
+            html_items.append(f"<tr><th>{escape(key)}<td><pre class=repr>{value}</pre>")
+        if not html_items:
+            html_items.append("<tr><td><em>Nothing</em>")
+        return OBJECT_DUMP_HTML % {
+            "title": escape(title),
+            "repr": f"<pre class=repr>{repr if repr else ''}</pre>",
+            "items": "\n".join(html_items),
+        }
diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/shared/FONT_LICENSE b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/FONT_LICENSE
new file mode 100644
index 00000000..ae78a8f9
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/FONT_LICENSE
@@ -0,0 +1,96 @@
+-------------------------------
+UBUNTU FONT LICENCE Version 1.0
+-------------------------------
+
+PREAMBLE
+This licence allows the licensed fonts to be used, studied, modified and
+redistributed freely. The fonts, including any derivative works, can be
+bundled, embedded, and redistributed provided the terms of this licence
+are met. The fonts and derivatives, however, cannot be released under
+any other licence. The requirement for fonts to remain under this
+licence does not require any document created using the fonts or their
+derivatives to be published under this licence, as long as the primary
+purpose of the document is not to be a vehicle for the distribution of
+the fonts.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this licence and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Original Version" refers to the collection of Font Software components
+as received under this licence.
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to
+a new environment.
+
+"Copyright Holder(s)" refers to all individuals and companies who have a
+copyright ownership of the Font Software.
+
+"Substantially Changed" refers to Modified Versions which can be easily
+identified as dissimilar to the Font Software by users of the Font
+Software comparing the Original Version with the Modified Version.
+
+To "Propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification and with or without charging
+a redistribution fee), making available to the public, and in some
+countries other activities as well.
+
+PERMISSION & CONDITIONS
+This licence does not grant any rights under trademark law and all such
+rights are reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of the Font Software, to propagate the Font Software, subject to
+the below conditions:
+
+1) Each copy of the Font Software must contain the above copyright
+notice and this licence. These can be included either as stand-alone
+text files, human-readable headers or in the appropriate machine-
+readable metadata fields within text or binary files as long as those
+fields can be easily viewed by the user.
+
+2) The font name complies with the following:
+(a) The Original Version must retain its name, unmodified.
+(b) Modified Versions which are Substantially Changed must be renamed to
+avoid use of the name of the Original Version or similar names entirely.
+(c) Modified Versions which are not Substantially Changed must be
+renamed to both (i) retain the name of the Original Version and (ii) add
+additional naming elements to distinguish the Modified Version from the
+Original Version. The name of such Modified Versions must be the name of
+the Original Version, with "derivative X" where X represents the name of
+the new work, appended to that name.
+
+3) The name(s) of the Copyright Holder(s) and any contributor to the
+Font Software shall not be used to promote, endorse or advertise any
+Modified Version, except (i) as required by this licence, (ii) to
+acknowledge the contribution(s) of the Copyright Holder(s) or (iii) with
+their explicit written permission.
+
+4) The Font Software, modified or unmodified, in part or in whole, must
+be distributed entirely under this licence, and must not be distributed
+under any other licence. The requirement for fonts to remain under this
+licence does not affect any document created using the Font Software,
+except any version of the Font Software extracted from a document
+created using the Font Software may only be distributed under this
+licence.
+
+TERMINATION
+This licence becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF
+COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER
+DEALINGS IN THE FONT SOFTWARE.
diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/shared/ICON_LICENSE.md b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/ICON_LICENSE.md
new file mode 100644
index 00000000..3bdbfc73
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/ICON_LICENSE.md
@@ -0,0 +1,6 @@
+Silk icon set 1.3 by Mark James <mjames@gmail.com>
+
+http://www.famfamfam.com/lab/icons/silk/
+
+License: [CC-BY-2.5](https://creativecommons.org/licenses/by/2.5/)
+or [CC-BY-3.0](https://creativecommons.org/licenses/by/3.0/)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/shared/console.png b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/console.png
new file mode 100644
index 0000000000000000000000000000000000000000..c28dd63812d80e416682f835652f8e5824bdccb2
GIT binary patch
[base85 payload omitted: PNG icon, literal 507 bytes]

diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/shared/debugger.js b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/debugger.js
new file mode 100644
index 00000000..cd9efcdf
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/debugger.js
@@ -0,0 +1,359 @@
+docReady(() => {
+  if (!EVALEX_TRUSTED) {
+    initPinBox();
+  }
+  // if we are in console mode, show the console.
+  if (CONSOLE_MODE && EVALEX) {
+    createInteractiveConsole();
+  }
+
+  const frames = document.querySelectorAll("div.traceback div.frame");
+  if (EVALEX) {
+    addConsoleIconToFrames(frames);
+  }
+  addEventListenersToElements(document.querySelectorAll("div.detail"), "click", () =>
+    document.querySelector("div.traceback").scrollIntoView(false)
+  );
+  addToggleFrameTraceback(frames);
+  addToggleTraceTypesOnClick(document.querySelectorAll("h2.traceback"));
+  addInfoPrompt(document.querySelectorAll("span.nojavascript"));
+  wrapPlainTraceback();
+});
+
+function addToggleFrameTraceback(frames) {
+  frames.forEach((frame) => {
+    frame.addEventListener("click", () => {
+      // Expand/collapse the clicked frame's source listing. Indexing the
+      // frame's own <pre> list with the outer loop counter would over-index
+      // every frame after the first, so toggle via the first <pre> instead.
+      frame.getElementsByTagName("pre")[0].parentElement.classList.toggle("expanded");
+    });
+  });
+}
+
+
+function wrapPlainTraceback() {
+  const plainTraceback = document.querySelector("div.plain textarea");
+  const wrapper = document.createElement("pre");
+  const textNode = document.createTextNode(plainTraceback.textContent);
+  wrapper.appendChild(textNode);
+  plainTraceback.replaceWith(wrapper);
+}
+
+function initPinBox() {
+  document.querySelector(".pin-prompt form").addEventListener(
+    "submit",
+    function (event) {
+      event.preventDefault();
+      const pin = encodeURIComponent(this.pin.value);
+      const encodedSecret = encodeURIComponent(SECRET);
+      const btn = this.btn;
+      btn.disabled = true;
+
+      fetch(
+        `${document.location.pathname}?__debugger__=yes&cmd=pinauth&pin=${pin}&s=${encodedSecret}`
+      )
+        .then((res) => res.json())
+        .then(({auth, exhausted}) => {
+          if (auth) {
+            EVALEX_TRUSTED = true;
+            fadeOut(document.getElementsByClassName("pin-prompt")[0]);
+          } else {
+            alert(
+              `Error: ${
+                exhausted
+                  ? "too many attempts. Restart the server to retry."
+                  : "incorrect PIN"
+              }`
+            );
+          }
+        })
+        .catch((err) => {
+          alert("Error: Could not verify PIN. Network error?");
+          console.error(err);
+        })
+        .finally(() => (btn.disabled = false));
+    },
+    false
+  );
+}
+
+function promptForPin() {
+  if (!EVALEX_TRUSTED) {
+    const encodedSecret = encodeURIComponent(SECRET);
+    fetch(
+      `${document.location.pathname}?__debugger__=yes&cmd=printpin&s=${encodedSecret}`
+    );
+    const pinPrompt = document.getElementsByClassName("pin-prompt")[0];
+    fadeIn(pinPrompt);
+    document.querySelector('.pin-prompt input[name="pin"]').focus();
+  }
+}
+
+/**
+ * Helper function for shell initialization
+ */
+function openShell(consoleNode, target, frameID) {
+  promptForPin();
+  if (consoleNode) {
+    slideToggle(consoleNode);
+    return consoleNode;
+  }
+  let historyPos = 0;
+  const history = [""];
+  const consoleElement = createConsole();
+  const output = createConsoleOutput();
+  const form = createConsoleInputForm();
+  const command = createConsoleInput();
+
+  target.parentNode.appendChild(consoleElement);
+  consoleElement.append(output);
+  consoleElement.append(form);
+  form.append(command);
+  command.focus();
+  slideToggle(consoleElement);
+
+  form.addEventListener("submit", (e) => {
+    handleConsoleSubmit(e, command, frameID).then((consoleOutput) => {
+      output.append(consoleOutput);
+      command.focus();
+      consoleElement.scrollTo(0, consoleElement.scrollHeight);
+      const old = history.pop();
+      history.push(command.value);
+      if (typeof old !== "undefined") {
+        history.push(old);
+      }
+      historyPos = history.length - 1;
+      command.value = "";
+    });
+  });
+
+  command.addEventListener("keydown", (e) => {
+    if (e.key === "l" && e.ctrlKey) {
+      output.innerText = "--- screen cleared ---";
+    } else if (e.key === "ArrowUp" || e.key === "ArrowDown") {
+      // Handle up arrow and down arrow.
+      if (e.key === "ArrowUp" && historyPos > 0) {
+        e.preventDefault();
+        historyPos--;
+      } else if (e.key === "ArrowDown" && historyPos < history.length - 1) {
+        historyPos++;
+      }
+      command.value = history[historyPos];
+    }
+    return false;
+  });
+
+  return consoleElement;
+}
+
+function addEventListenersToElements(elements, event, listener) {
+  elements.forEach((el) => el.addEventListener(event, listener));
+}
+
+/**
+ * Add extra info
+ */
+function addInfoPrompt(elements) {
+  for (let i = 0; i < elements.length; i++) {
+    elements[i].innerHTML =
+      "<p>To switch between the interactive traceback and the plaintext " +
+      'one, you can click on the "Traceback" headline. From the text ' +
+      "traceback you can also create a paste of it. " +
+      (!EVALEX
+        ? ""
+        : "For code execution, mouse over the frame you want to debug and " +
+          "click on the console icon on the right side." +
+          "<p>You can execute arbitrary Python code in the stack frames and " +
+          "there are some extra helpers available for introspection:" +
+          "<ul><li><code>dump()</code> shows all variables in the frame" +
+          "<li><code>dump(obj)</code> dumps all that's known about the object</ul>");
+    elements[i].classList.remove("nojavascript");
+  }
+}
+
+function addConsoleIconToFrames(frames) {
+  for (let i = 0; i < frames.length; i++) {
+    let consoleNode = null;
+    const target = frames[i];
+    const frameID = frames[i].id.substring(6);
+
+    for (let j = 0; j < target.getElementsByTagName("pre").length; j++) {
+      const img = createIconForConsole();
+      img.addEventListener("click", (e) => {
+        e.stopPropagation();
+        consoleNode = openShell(consoleNode, target, frameID);
+        return false;
+      });
+      target.getElementsByTagName("pre")[j].append(img);
+    }
+  }
+}
+
+function slideToggle(target) {
+  target.classList.toggle("active");
+}
+
+/**
+ * toggle traceback types on click.
+ */
+function addToggleTraceTypesOnClick(elements) {
+  for (let i = 0; i < elements.length; i++) {
+    elements[i].addEventListener("click", () => {
+      document.querySelector("div.traceback").classList.toggle("hidden");
+      document.querySelector("div.plain").classList.toggle("hidden");
+    });
+    elements[i].style.cursor = "pointer";
+    document.querySelector("div.plain").classList.toggle("hidden");
+  }
+}
+
+function createConsole() {
+  const consoleNode = document.createElement("pre");
+  consoleNode.classList.add("console");
+  consoleNode.classList.add("active");
+  return consoleNode;
+}
+
+function createConsoleOutput() {
+  const output = document.createElement("div");
+  output.classList.add("output");
+  output.innerHTML = "[console ready]";
+  return output;
+}
+
+function createConsoleInputForm() {
+  const form = document.createElement("form");
+  form.innerHTML = "&gt;&gt;&gt; ";
+  return form;
+}
+
+function createConsoleInput() {
+  const command = document.createElement("input");
+  command.type = "text";
+  command.setAttribute("autocomplete", "off");
+  command.setAttribute("spellcheck", false);
+  command.setAttribute("autocapitalize", "off");
+  command.setAttribute("autocorrect", "off");
+  return command;
+}
+
+function createIconForConsole() {
+  const img = document.createElement("img");
+  img.setAttribute("src", "?__debugger__=yes&cmd=resource&f=console.png");
+  img.setAttribute("title", "Open an interactive python shell in this frame");
+  return img;
+}
+
+function createExpansionButtonForConsole() {
+  const expansionButton = document.createElement("a");
+  expansionButton.setAttribute("href", "#");
+  expansionButton.setAttribute("class", "toggle");
+  expansionButton.innerHTML = "&nbsp;&nbsp;";
+  return expansionButton;
+}
+
+function createInteractiveConsole() {
+  const target = document.querySelector("div.console div.inner");
+  while (target.firstChild) {
+    target.removeChild(target.firstChild);
+  }
+  openShell(null, target, 0);
+}
+
+function handleConsoleSubmit(e, command, frameID) {
+  // Prevent page from refreshing.
+  e.preventDefault();
+
+  return new Promise((resolve) => {
+    // Get input command.
+    const cmd = command.value;
+
+    // Setup GET request.
+    const urlPath = "";
+    const params = {
+      __debugger__: "yes",
+      cmd: cmd,
+      frm: frameID,
+      s: SECRET,
+    };
+    const paramString = Object.keys(params)
+      .map((key) => {
+        return "&" + encodeURIComponent(key) + "=" + encodeURIComponent(params[key]);
+      })
+      .join("");
+
+    fetch(urlPath + "?" + paramString)
+      .then((res) => {
+        return res.text();
+      })
+      .then((data) => {
+        const tmp = document.createElement("div");
+        tmp.innerHTML = data;
+        resolve(tmp);
+
+        // Handle expandable span for long list outputs.
+        // Example to test: list(range(13))
+        let wrapperAdded = false;
+        const wrapperSpan = document.createElement("span");
+        const expansionButton = createExpansionButtonForConsole();
+
+        tmp.querySelectorAll("span.extended").forEach((spanToWrap) => {
+          const parentDiv = spanToWrap.parentNode;
+          if (!wrapperAdded) {
+            parentDiv.insertBefore(wrapperSpan, spanToWrap);
+            wrapperAdded = true;
+          }
+          parentDiv.removeChild(spanToWrap);
+          wrapperSpan.append(spanToWrap);
+          spanToWrap.hidden = true;
+
+          expansionButton.addEventListener("click", () => {
+            spanToWrap.hidden = !spanToWrap.hidden;
+            expansionButton.classList.toggle("open");
+            return false;
+          });
+        });
+
+        // Add expansion button at end of wrapper.
+        if (wrapperAdded) {
+          wrapperSpan.append(expansionButton);
+        }
+      })
+      .catch((err) => {
+        console.error(err);
+      });
+    return false;
+  });
+}
+
+function fadeOut(element) {
+  element.style.opacity = 1;
+
+  (function fade() {
+    element.style.opacity -= 0.1;
+    if (element.style.opacity < 0) {
+      element.style.display = "none";
+    } else {
+      requestAnimationFrame(fade);
+    }
+  })();
+}
+
+function fadeIn(element, display) {
+  element.style.opacity = 0;
+  element.style.display = display || "block";
+
+  (function fade() {
+    let val = parseFloat(element.style.opacity) + 0.1;
+    if (val <= 1) {
+      element.style.opacity = val;
+      requestAnimationFrame(fade);
+    }
+  })();
+}
+
+function docReady(fn) {
+  if (document.readyState === "complete" || document.readyState === "interactive") {
+    setTimeout(fn, 1);
+  } else {
+    document.addEventListener("DOMContentLoaded", fn);
+  }
+}
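
Every request this script makes (`initPinBox`, `promptForPin`, the console icon, `handleConsoleSubmit`) is a plain GET against the current page with `__debugger__=yes`, a `cmd` (a Python expression to evaluate, or `pinauth`/`printpin`/`resource`), the frame id `frm`, and the page secret `s`. A rough Python sketch of the same request, where the URL and SECRET values are placeholders for a locally running debugger:

    # Sketch only: BASE_URL and SECRET stand in for a live debugger page;
    # the query keys mirror the params object in handleConsoleSubmit above.
    from urllib.parse import urlencode
    from urllib.request import urlopen

    BASE_URL = "http://127.0.0.1:5000/"  # hypothetical app with the debugger enabled
    SECRET = "replace-with-page-secret"  # the SECRET global embedded in the page

    params = {
        "__debugger__": "yes",
        "cmd": "dump()",  # the introspection helper advertised in addInfoPrompt
        "frm": "0",       # frame id, as sent by handleConsoleSubmit
        "s": SECRET,
    }
    with urlopen(BASE_URL + "?" + urlencode(params)) as resp:
        print(resp.read().decode("utf-8"))  # HTML fragment rendered into the console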
diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/shared/less.png b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/less.png
new file mode 100644
index 0000000000000000000000000000000000000000..5efefd62b43e4f11dd300be4355a4b413c7a70d2
GIT binary patch
[base85 payload omitted: PNG icon, literal 191 bytes]

diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/shared/more.png b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/more.png
new file mode 100644
index 0000000000000000000000000000000000000000..804fa226fe3ed9e6cc2bd044a848f33a2d7b4e4f
GIT binary patch
[base85 payload omitted: PNG icon, literal 200 bytes]

diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/shared/source.png b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/source.png
new file mode 100644
index 0000000000000000000000000000000000000000..f7ea90419d950f9e69d977a1f5847456d96a5f0b
GIT binary patch
[base85 payload omitted: PNG icon, literal 818 bytes]

diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/shared/style.css b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/style.css
new file mode 100644
index 00000000..a45522db
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/style.css
@@ -0,0 +1,163 @@
+@font-face {
+  font-family: 'Ubuntu';
+  font-style: normal;
+  font-weight: normal;
+  src: local('Ubuntu'), local('Ubuntu-Regular'),
+    url('?__debugger__=yes&cmd=resource&f=ubuntu.ttf') format('truetype');
+}
+
+body, input  { font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+               'Verdana', sans-serif; color: #000; text-align: center;
+               margin: 1em; padding: 0; font-size: 15px; }
+h1, h2, h3   { font-family: 'Ubuntu', 'Lucida Grande', 'Lucida Sans Unicode',
+               'Geneva', 'Verdana', sans-serif; font-weight: normal; }
+
+input        { background-color: #fff; margin: 0; text-align: left;
+               outline: none !important; }
+input[type="submit"] { padding: 3px 6px; }
+a            { color: #11557C; }
+a:hover      { color: #177199; }
+pre, code,
+textarea     { font-family: 'Consolas', 'Monaco', 'Bitstream Vera Sans Mono',
+               monospace; font-size: 14px; }
+
+div.debugger { text-align: left; padding: 12px; margin: auto;
+               background-color: white; }
+h1           { font-size: 36px; margin: 0 0 0.3em 0; }
+div.detail { cursor: pointer; }
+div.detail p { margin: 0 0 8px 13px; font-size: 14px; white-space: pre-wrap;
+               font-family: monospace; }
+div.explanation { margin: 20px 13px; font-size: 15px; color: #555; }
+div.footer   { font-size: 13px; text-align: right; margin: 30px 0;
+               color: #86989B; }
+
+h2           { font-size: 16px; margin: 1.3em 0 0.0 0; padding: 9px;
+               background-color: #11557C; color: white; }
+h2 em, h3 em { font-style: normal; color: #A5D6D9; font-weight: normal; }
+
+div.traceback, div.plain { border: 1px solid #ddd; margin: 0 0 1em 0; padding: 10px; }
+div.plain p      { margin: 0; }
+div.plain textarea,
+div.plain pre { margin: 10px 0 0 0; padding: 4px;
+                background-color: #E8EFF0; border: 1px solid #D3E7E9; }
+div.plain textarea { width: 99%; height: 300px; }
+div.traceback h3 { font-size: 1em; margin: 0 0 0.8em 0; }
+div.traceback ul { list-style: none; margin: 0; padding: 0 0 0 1em; }
+div.traceback h4 { font-size: 13px; font-weight: normal; margin: 0.7em 0 0.1em 0; }
+div.traceback pre { margin: 0; padding: 5px 0 3px 15px;
+                    background-color: #E8EFF0; border: 1px solid #D3E7E9; }
+div.traceback .library .current { background: white; color: #555; }
+div.traceback .expanded .current { background: #E8EFF0; color: black; }
+div.traceback pre:hover { background-color: #DDECEE; color: black; cursor: pointer; }
+div.traceback div.source.expanded pre + pre { border-top: none; }
+
+div.traceback span.ws { display: none; }
+div.traceback pre.before, div.traceback pre.after { display: none; background: white; }
+div.traceback div.source.expanded pre.before,
+div.traceback div.source.expanded pre.after {
+    display: block;
+}
+
+div.traceback div.source.expanded span.ws {
+    display: inline;
+}
+
+div.traceback blockquote { margin: 1em 0 0 0; padding: 0; white-space: pre-line; }
+div.traceback img { float: right; padding: 2px; margin: -3px 2px 0 0; display: none; }
+div.traceback img:hover { background-color: #ddd; cursor: pointer;
+                          border-color: #BFDDE0; }
+div.traceback pre:hover img { display: block; }
+div.traceback cite.filename { font-style: normal; color: #3B666B; }
+
+pre.console { border: 1px solid #ccc; background: white!important;
+              color: black; padding: 5px!important;
+              margin: 3px 0 0 0!important; cursor: default!important;
+              max-height: 400px; overflow: auto; }
+pre.console form { color: #555; }
+pre.console input { background-color: transparent; color: #555;
+                    width: 90%; font-family: 'Consolas', 'Deja Vu Sans Mono',
+                    'Bitstream Vera Sans Mono', monospace; font-size: 14px;
+                     border: none!important; }
+
+span.string { color: #30799B; }
+span.number { color: #9C1A1C; }
+span.help   { color: #3A7734; }
+span.object { color: #485F6E; }
+span.extended { opacity: 0.5; }
+span.extended:hover { opacity: 1; }
+a.toggle { text-decoration: none; background-repeat: no-repeat;
+           background-position: center center;
+           background-image: url(?__debugger__=yes&cmd=resource&f=more.png); }
+a.toggle:hover { background-color: #444; }
+a.open { background-image: url(?__debugger__=yes&cmd=resource&f=less.png); }
+
+pre.console div.traceback,
+pre.console div.box { margin: 5px 10px; white-space: normal;
+                      border: 1px solid #11557C; padding: 10px;
+                      font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+                      'Verdana', sans-serif;  }
+pre.console div.box h3,
+pre.console div.traceback h3 { margin: -10px -10px 10px -10px; padding: 5px;
+                               background: #11557C; color: white; }
+
+pre.console div.traceback pre:hover { cursor: default; background: #E8EFF0; }
+pre.console div.traceback pre.syntaxerror { background: inherit; border: none;
+                                            margin: 20px -10px -10px -10px;
+                                            padding: 10px; border-top: 1px solid #BFDDE0;
+                                            background: #E8EFF0; }
+pre.console div.noframe-traceback pre.syntaxerror { margin-top: -10px; border: none; }
+
+pre.console div.box pre.repr { padding: 0; margin: 0; background-color: white; border: none; }
+pre.console div.box table { margin-top: 6px; }
+pre.console div.box pre { border: none; }
+pre.console div.box pre.help { background-color: white; }
+pre.console div.box pre.help:hover { cursor: default; }
+pre.console table tr { vertical-align: top; }
+div.console { border: 1px solid #ccc; padding: 4px; background-color: #fafafa; }
+
+div.traceback pre, div.console pre {
+    white-space: pre-wrap;       /* css-3 should we be so lucky... */
+    white-space: -moz-pre-wrap;  /* Mozilla, since 1999 */
+    white-space: -pre-wrap;      /* Opera 4-6 ?? */
+    white-space: -o-pre-wrap;    /* Opera 7 ?? */
+    word-wrap: break-word;       /* Internet Explorer 5.5+ */
+    _white-space: pre;           /* IE only hack to re-specify in
+                                 addition to word-wrap  */
+}
+
+
+div.pin-prompt {
+    position: absolute;
+    display: none;
+    top: 0;
+    bottom: 0;
+    left: 0;
+    right: 0;
+    background: rgba(255, 255, 255, 0.8);
+}
+
+div.pin-prompt .inner {
+    background: #eee;
+    padding: 10px 50px;
+    width: 350px;
+    margin: 10% auto 0 auto;
+    border: 1px solid #ccc;
+    border-radius: 2px;
+}
+
+div.exc-divider {
+    margin: 0.7em 0 0 -1em;
+    padding: 0.5em;
+    background: #11557C;
+    color: #ddd;
+    border: 1px solid #ddd;
+}
+
+.console.active {
+    max-height: 0!important;
+    display: none;
+}
+
+.hidden {
+    display: none;
+}
diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/shared/ubuntu.ttf b/venv/lib/python3.7/site-packages/werkzeug/debug/shared/ubuntu.ttf
new file mode 100644
index 0000000000000000000000000000000000000000..8079f938c9fa5aede9a151439f136e267958ccd1
GIT binary patch
[base85 payload omitted: TrueType font, literal 70220 bytes]
z_MqeX1wnU}fCdLZ$1MunI}19v3w%mwtN_|Ma{`b<=D@Qt$Op}Vrf8tY%|OneV>A{t
zmeAKe3p(Oo8`R6zHV2<u0xNT&N3wwqp<&c#1P?E;!)~nr^%>XMr>Z*H+bRl+3CgI7
zimJ*8iU}#&**mFa3Can`D1c9-+b%CACJ(}!Hl=NxKf#?(OGZLfnvYLfRYF>m&wb+j
zjp@1(y8fVp>J~*XK9G`?m6DQ`Wng09U|?VdooNEv>Co)p2EN{wUxb%iQjAYbM1n)e
zLxfL^ub5v%gr8r8Q>>VYjg4QFg<XbUnyHdSO0trRn~Q~ug^NRsu~Nj5pPz$|kFAme
zbleU2Rs(xbdjQ<B5dc+pi~`35t{poDIg=QAK##USJLsT3Xeb&QFe-{dp{XKhP82i`
z#LCCWZp@th?=mCjzweB0|DyiAm(Ul>u*}U!HDcWNuh_K6_6bwUMgPx%n{*;|bM~*A
z>F(pP4K#oTI%Ccme8!vuqlANx6X=Y&6g|dZJw_)zMrA!?JtiSNMrJ+6P%TDfEn_Wb
zEoLDtMrJKWjtpi-O(shwrY!v`{VsjxS$d4cTFqLMwV0E%7+tkOwV1#M2-oood1%>L
z*)iFf=vY~?3V@EOa}twcWS3%;D$8J<!O9fQ%E-FX(a^$9(@QBzDNBi2iCtb?K3qOs
zo<+XKOF2q8OPN`jAw#V~ZH5|ixEiCHo2x*mK&k+<0EeiAXoP5nD2r%KJO6ZkCRhGY
z{#1Tu{thn2bS}mSE=Dd_@BoH!Z0y@uBT)O?*ci;ZcK6;{MuC4IVbF;VM*7BrmilMU
zoC6&Z13K{%e3c5gW&tPjSZ!9w=oe@f2Xx*AXp;wMcni8<2-F0{S{*Yo`q-totK&JY
zjxRfinUx8n!e-&+<}i=0^~ZIH9k)Ca8#gOPZO)^>@c%#PJT@i?aG9zBEmON8Wh&$U
zPY#^?ybN3cjEvADa<Qo|W2l3aeT>R{JYdDV@UjndR*M8Ps7!^JeUu>=X7*<XPC*=I
zgUU*X`Yf<|sf|pGJN|!m;N??b;0j=3l<;R{<bf2I|Nn!|^JO>-cE1k9{r@k)?0De-
z+LHx3wwDd!B#`^v7#J9tkkps`KMHlf6dMZzNHHr!@&Esz?gyhF*!>W*kN!UmRm;H1
z3RVj>`2YX^9~c-Im6$<php5d0s}*+;03Dbs!N3^6#310$$iM<|IH(QH#31_r7t=hZ
zJq#iYstn~0zM2x55|t9n5)zD@5{w)YWz!iL!x+*a=Vh-{XHYLt-=NN{tRPp+%f+!m
zja$BwOHq+kPC}Ydx`sDgfKk9zSzMG+w1y)BIv59?dwLrSnr;H`r~G!#`0qL6Sb=Y#
z0Rd2wg*AfI)Is}fp}RR4L7fV4PXc_wh#Vu+Jm`Uf0RiYo3N|%y!VeW>wDe|_K{;3O
zs`n@GCE(C>*1`;`GY!G<bev%$EG(5pIE4Zjxs>4H4n98#97jfAwayF|AaTm{|Ahmm
zwk88-027NAL^&u<(bbnR9EO<9B&nqVR;&q8{Qp1rjLb{mIE9#fl;I>)ErTwK!Em)8
zH$&7OXE+5@YYs8j49#3n{6o}cfz5@~^IA3xoB_-%CjLw;8i@D@t!+(Ye8%{dft#Vz
zA%Z)Ba|0*SG)_iNPDa+5pkumEaxre>I>yB`jf;`%7z^W0mXj<@Gg%l}7#VqZ**0)6
zPUB$Y;NW7K2}-9Y*%`O7A7f{l#?Hvja*UC2C*w&*rkRY4j9d(o`o^(^MaE!4Kh_A;
zAua-6nFA3A$?3-$Lxc?tRMpwlLDN^};_Tv#>n2UIfT2Vf!(!4T24)7E|96-!G2LR2
zVo+hwX9{*Gan(!JV=~iC(`D+^VXTm2%#dZYlx5VAW%QC@Oc4hy=}Hp?-C^g()yBn?
z#mSh%!C0v`Q;#W1moZ9*F-?;(RD;n&nQ@vNW0)+Xt1M%)3}c8C=!~{$5{#*0j9H?L
zks^$<xEQlI8Oxa&H6<)1m_WM(xA5>X_^^Zadr5IK_)5siv2%!Xh=Ue_i-QiL5eIEr
z5C`p?72yS)2B|8fz$yybO)4tE&ETsGTK~n%;0xY*#>K<nE2^i<#VH29qJ<N*tAbOA
zl|u}qTMD#fjYEusK?}4{T8o2^!B-1(E|XRnXxA=i#Wjd_0G-Rl3tG3ryVCH3AtR_#
z1FaVW-Tvah&Bfp=T?Rg^0laKm5VU$7v{?swHkX0yj{hIFgVtNh>ggfvb^EcI3%u9u
z!&cCe<QniQ3|o-n#Kj~8Kz<e#6#}i_7Xq0j<faQ=C8{eYrvP%K0%+-`0%+-`LJcQ)
zqX;Jl=xo4kpglwE?4VO#L5>H}pj``~1HZJjjRlRhj~Rg$!D|}{8iThNql&<HJc2en
zLXKtyU4<g4uMeV(v}@as9653XydF!?7z9B}Yd{SXupp@7gjAI%BzOu5Ndz=Cj+jG*
z^j_K3&Gi_~MZ_7wBR8NWB%s+QMltZ=Vp&1fb`I*gp!3B{<29}9>@@V!bQtZ;{XvI|
zJq4XKCMNRlkSXMtF$-V4VBPv4&`D#`(%yOjx(y+@Zj5ua)jiVxtpS}mW*V;iFI5xV
zrvsmv11^s&pyknGNO{BvDvzWkS%d-@*`QqsP<e!|z6{(?0i8K7DJ20`3~KJ1AejxS
zBOzvkY9nwTg^@uP#o+({86f6@${2{+EQUD@a~L3XjFb+GPyiE~ia#Tp1iXy-{~y#I
zWeNhj-3ILTG6s+?kXxjL1zC6k7+HiMWf%h!xGiAM1iI@)4s^Df`2QE6#47^IT%b+a
zqGfD+{QLsa#j<R&Y_baS0>U-i43$#cQruE9a*i^fBSeHit$oly2zUs>QXjNLz(~*-
zG%%vAZEkD?xmZD2i;>+_)Q-_u0kkt+8+2OHPRMy`7Jm9d>cYaB!i?tH;Ir25@hX^U
zX_(6MFdZpmV!Ue;Y%0;mv=uy7^o7ZjnS;TV;etc|G8aa57e-MR#+3$)EA<&y>M?3-
zFv@6vCQRA*8QGl~D=Zkx4Hzv&7+VDx*@VP|)P$IYnv@xuj5(TCXftX{S=L+6P-dKA
z%*b!dsBCO)ZLG}f?gnZ@u25rE6X53aQ0rRC%h<*{jhBg=mywrOw9||+!>q!L$*hCT
zaT(}Pb$=@_>FElL3LR3SpiL#9yD{zKKvP{pmY~5=&;?k6@S#$Hzvu3OMk>L>5TK){
zz->ra+6N7`L+=X_2i<nV2AUaDH<x3E45&kv*MqMP0#(xBO+=ufCOIa?FMOhs+~NxS
z60-c#`pUwIX`)&x(rf~3l8WjgMh1G8irNxfI+9$XLfj@M&bF~KW&tK<eum;q8ItOX
zGCY!slKf%<eC)h(>ZYoGA#9vn%z?~YT<kik>PAxh(lT-)8fH>l5|UDUI)*;Ze{V2p
z83kD>NN8Jw(meRg31$u<HPCKOK{f{GAO;3-&-|wYr=_`^Z~zm(1vsJo|IdJ~w#=0Q
z+$-0%Fas+z2PuTBV}zS|)ElBs&>G1=m^!BWaCOIhAnJr%A?CRto5yqyt}e@)0hD`~
znFKAo<b(s5`JMck_(4@Vs3#6Hmzf)`{+Krd1J8D5CP90UxgbT%40`{+Fsn0(GFUVC
zFoZI4I;;x|VoWw+3^8FW*JfO*%E;`&#aPM2I76C|O@NVunNf$Ak;%8oQg^!ga&@K-
zX2vvT#!zNPcV<RbW>sb;b>``z%R`ycLK#Edj2MlY#GUNLmwPdKH7W9z>$dAMF-z+*
zN{d&EGg<R8%J4EW@$)k3iu3Y{>oSLj8H6*2w(=V?8XAfVbvQA&GP=5Q1azpEYcXbM
zF|unha%cr8$aW~I1lZejFmnW2n08o#b~l2iYK=iBp1(D+2Q>+eK|{=-&bqOF>|4fI
zSfT}=l^Gu^aO7>Qps_J%hA1``c0iNBk+;wTGePr3V3o0~pmY40K`9qDkAXbH2s&B~
zbT$QOu1J;<u`eEUsD~b-I%FxCsE8PBk33VlWvBv|qMoIak-r7>#11K>6FY(pTmwZE
zqYN_&LEGqq!ywz}|7Drj2=EIsa%+Ll{cvRb3Eok!D8kGmEyXS*$StIS?K}^CEnS_K
zRt|1f&VW!s&~AHwac&t0707`g#%iCT8}AtzO#eHB&%m%^Sm(g&Zxe4*Z^LW@I@p2<
z)MSw55b_Y?nZ~k=g^7hxNJvgC!K}fI$;>8Pmr=J#4pgYR@G<y?gZ5g=%Q4BVwpNeO
zV)WEv)ap=E4loTjVg#jt4h9}h2~GhIPF?{qMl(rJYlc%&Qc_bgQ?gQ$l|vF7gK@Fi
zM%V7%Gdinn1RC5lvNr}D^2!Km=4*rRjRg%696cL*<Q!<BG^2JbE9%q$Y(SXN9M*Hi
zT#m-f=)jHH^W@_&_BD}G#(GwUw30fHBxbKuiJx89E!qt81PvBYZw*{0g3i$R4K7PO
z7#KihNhY)``QX5*uFAk3z{H{k%F>{62VHGh3?%O>s;Ph#s)7{${|`BTf*DknK+HUv
z1X0JK2{91VFgJmz1D6pHby-Q!GJ-?hl7T&dnZ?kbi3MaRDEt5a&%nvRz;G1o)<Y1t
zo&viS+%jNiWn>6oWP)@O|NjTA!(;e`q_%7?*bR(QY%E}f;GXCI|Nnn7Ffek0-3l@D
z=wXOD22Lac|NsC0f`Nfi3hY*hx~#)sw}N||j0^!xOalIlOz<8jWY4G>GY5kbgE_-v
zhiPjR8D}aoE|X_mCC%t=$~aAtQCyKRQ-YCQkg;5VF-(AQnH-~>tgNUaqaq_qld_t$
zgqWIM6Q2r$2?JA{6r;Bkqr8;9l)V%)Kc74w69<Eu6d#|I8Uu@^1*jj(Bq3QY#wZpl
z%;+Y}DBPi0qR5!4$QZ83Xr{=hEzQU%t;nn-sVS++spKo$D(7$9;mH}v$;27RU?2O|
z{;fT@B>}tE7M!X<_kJ=$iz9tdNfjFln%yuq7P#`($SBrG-x#T332sa>!jm~{HdRbq
z4b;B@Pl$<|Gn=TvPY{x00<}(<%(4}9#4X}ItRt;u6>Y-JjD3W7v@68Z6pgLLEM;rb
z8I`;gG%ZyGg_KQ|^{f?nnI@{5O^9=<j<?kDh%>f`vR72#3vki2S5{Xw<}*~${#VK&
zz#%6lsbi(2;%F$V;}P@!KX`BdH*kFw2u?G{)1mRJF2TSaz{mtn+~BnMnMoF$UV^~t
zoQuF|3)~A<R^(&|VB&*xnz5)YO9h+BB&nnTR;UQi2Ou*+`2b?(QP4Dm12+?cDw2UP
z^T2L|s5_nmF^@qXVxAtddEhh%QJ0ksPJ_%$3`!=P3<1o1I{r+23a}O_GlMyUE)ye@
zCU_l{G()n3!+h!W(o6}`jQ-N`(o77}0@6$z(vr;lpjB1;oT3eq6C{~H+xeIRL>ojW
zh%!rxN{Y(L2=Pd)mttTGkYbc#(2u=$R^acwv!FR9OYlszz=6Bk2L#T7(}<ygD$=?u
zW+XOfVJf2x^e6^NDE$v~50WnE5C#xtYyy!W9OKWx%wYQe4wEdCHiHy{5`z|_g~NsE
zs>@ZG%2XLEWLL;CrAROaOE89sGkWqdTJtfk;$@8H%I0D+V{~I=TE@V*T#IqGDr2Q8
zW40<|lq%y&CB`YTjAgQnsj`eAvW(#pj1gQNTud`$8E5b^R`Pc8GD*nE^72UX@NjD=
zh|c6<oX)^l#?Z#V#HFUnAjK^uD!G80alIrXw<NcuK%{b}GLv!>^J<+i=`?93=_Y;w
zc>yL4fz{eGR2fy}<UAE36_^w{q$C+6(j*uqL<6SFGRmr{xoL!HFllt~aPx9yaxrpo
zfEuo`g2u;;-hze+K+6LW97ga)L(us)u|k%XlAv%FvNVEr^uV+K#)8JsE?z9;tVLLR
zHWsCI8ym|A8~y}kMLkAH`#?<GT%1)zTo}}(Wi?S}1`X6RK9Z4Faf~wcw=y&}gD}-y
zWerXL)rwwGF?983)DXL@WbEqCxK}yW611CK(?P>cInENap<GPFLF2!pE~Ai4dZeBH
zzjxB0edYiEgZGJk0OzR)NS;au=P7V`Eh)~<5WvU@>OYx)>K32>Um20rmPLW{0HdUY
z7+9gWzqU50&VZ-`<td1nM-w3G7^IO5gxL+wD-d;A3E;fK#K<6_#m*4G#Hr-Z$SDTR
zEC2sP_St~l8UuD~nKQ&(J!M4&;Q&TXNakk(&6|VI9ur_tWyo-FovF+?i;1z5fia4K
z(UXCZp?-xR<4i%u2th_oK}$hTL1w|#>arY?_0p@<IG8)+6a!@aq`bR1XLB-f@^o}d
z&X#176ayy(&=OJb))~<5YS77Ipcs7%j@{T;@XmP9!3Cf*ZtR$lw#LIwg#m4j=U0#w
zV@2N{A1h@kWf||R%E%m@3_b^DAIc7S=8XdXzH@S`xuwFw032qJFv#+RhM9<}xq@&2
z6Q{mEBd0Pn%s~A@1_p-pOrZ1h<Q)W|cM`8+u4i1u!sH3w*2XCC?JxY)nDw!-OdQ~|
zi){Y?WZuWLhe3!zmm%N5Ls`9;i%UXHNmHMlL7ah!p{zn;hQta9W>EIg<I<?qQmIr^
zQ_ynaR;pBR<lvLwi{Q)PW8tfjb!BH2XANgfXJz4Ftr2!(uzwpDYyURZ{;d({yh-rt
z6oGr7xdiCU4QQn?YzMuWId~5}9}}YzqQZjC-hgIt_!t+fxu@BSD{}L2is^{ir@5;!
z3I%qhT3V%c1&UipGPeIa$te8qEn~pH1W8NAXonP6RYpceCMH2<CRNvzfAKmIt&x!}
z5jrf)fuPwPX7IfqpmOaqb3KzNXtt5ThGBt2M!9&qIFpJhv+!~oMw<}16geh0IYzlA
zje1qaNR3PlCRGhp4em@6Mw6y;$#zL5$<=mYdTDx0dQIS~3|8CPw93k|38#xOiUpW^
zYDa1_X?F+&s3<DB8HO1$8FsJ*sxoIVFoN2yu|`Jr;L-*}LUtB_ZWICaoCW@Yuhs+?
z>5vA1r6p)FLoB#!0oiyBJ!}A5SAr3|^bE3W3f_&_@1H5FkYsNf1v!wyK3QHd)eNoE
za2#|x1qd@H!Mh7q#%31k9+0Cdtc@+qP`U?<0uhjtE4rXP1JJsAGbT4iH)ci#Rt7x>
zi2zncR@QjX#ZAo2Y@qF#pmQe~m{=I}jqie2pGoQ)N$AHK>BmBjb7WWF*P))6sLssT
zuM-iW1KRTt&ScN1%FGQu@1w*)oxwp^-h*L*h!lvHTEMklgpq+ufNKL6vj`Vx721A&
z#^vB?+Yb(E9DE-9%QuKK%8N7di|dQmi_aHl;SkSVFUu&qd<H8cs{jX|2WvKiy>{%|
zLi@Pb!os)Ov4!y3RN$?)z*}wDL;);aqAg2Ph0UbGXd}?vDF`zyh0UA7XxLmSBZDMU
zI%5*keFj;EPzUo&_Dc3nc4k?A26+a32G9w5{BjJkvJ1F3a3A1i<~9*=5n&QpF9lk9
z$1KIl&&OxU>dDH)%Ag+$I&<c3?48)NAQq#*HKQ|epoJz3$QzSDhd_evSB5N6H)l*@
z6_ipI6;T!tl;9SW<uG6oLgq3Z=TVRr=HuYtkP(nTFhTLkz{tY*g)x|cnL%(XBP%lt
z1L#OjMu7tY2Mi4a6-5<6XL~XRgBG_jGB7Z*Fuwq+6~<5tQO*3~j|50HBZD0i2g4br
z9}K(<-a8pMAeUQnfHqR92{QPyvstk*a<fUZX|pkNurW08a`8LxGV-!dU}My0vuDd^
z107--8w<M1T>!Mp0=$YJbUKHy4WqdoBWR(9sEC-j9V3gHw1=oDBO?<Bw<woMnvS?m
zlscC<Hv=PsGUGpnjZE(tco{Su#M#(+8Jf8GCh#)q^V;*~^D^_YJFqc=u8=g=kBwy%
zxboH*w2Mzzh7mNIscy#z8in16F#R3K^i*AO-AJ(Mpm0<E-^8#Hd>_3D=$xGg4pJQ4
znD@~Ouqm*Cuc`!jMNr66{~YXAdK^|2foy{N1JuW2;$UoI`XQjka0<LW<`ly=aGz3{
z@gHM4(>nn*hSOm2(=hS>O^oRbMFMIJXTaiTK;jIH3}y_E7|j?TFbFdUZ{uQM5EbEP
zWdJSh7qZkpi<uc1&4d*7<>d4gg<v#eqLiMJsHl>jl%zh0)@NX3P++iU_{;c{ft5jf
z3kwq~BWR_Z0}r1N=>9|&RwhOUX3%8KS);Q?j1mIZj%r^uI%)*E9#foMoLym#V}8CP
zqs(39UAvS)do&sB8QH;RX*dWmGqNx+Iq(U2FtM^TvoJESG9ejs@2vJ+qq9ba2Ik<2
zootXfbL>IJ++|?=AI-qPXbHZHN!>w|nUR5U9r)H#CKd)}M<ypm*s<q~`T|$L+pY}_
zI2n~087==+hBLM^ZT;)bv=v-`N-<h91~Tns;A0SG@Nv);=40E$%DA3+KQmJS^8{ul
zX3_ORj6w@IHgGU;hzM~qZf0g@XJ*|vpP!MRuYke+Z0z52pxYyij6h2r&w_Tz#e%NH
zHUgbx58CIWY^n^}!U{@m#*FetMlJ{WbWE&dS>zqW%b51QvW|__lXP%#lnhn~R0FS1
z)BpdONt5XigE7NVhsv4ij1g*#Qj&rKk^+(fdhBe9N~XqI?$V6X5<>jTMHxlw4Yf5G
zmgzCh&|_rN6Vr3kYtv)VGnuWpT9HXnu0xf9fuE6szg5uCP*7S!LtCO%N>EBrin&{9
zwi1)lbX7*xz!vr?>`d%j9h1SQ0I<zqX5?pPWDWtXC;%-e0Ier51~r`ZK_lYE`k?7H
z0qBbTSVn<6pi2mhj6m&fBO{~Ob4CK!z?%fYOFh7KUM%>?DCn>Z9}~MVyE2=Is0^dH
zF}pIT0R}p`lo7O@M%mO@&BWA((OlWITf8Gn$4XX2!ctDnQd5fkCErs<#*2Kr1tXoc
zY$ZelLySusgrkBO8G{&al$LUH25@qVX;><I@JjLVN^pBbx(RUmbBfd#dT>kd@=5V}
zfbtP6y$G=}TxMWkU}xZAjAYymT_df{V9OA|kj#+JzyKP<MjrfQRbqow#ik}|;(Uza
za-ecaU5`=S#LQfskC9yryj)crG`nLYCT^~#&ZY#CRZ~`C7dI1EQfC)4HUgDoW=7&*
z4d!BI=4NW@YHaLmVvM_$to(I#{j8Lfto(F!{jHS#P2(04;^q+)Jjlhy$}Yyu&C1Fy
zHcLcMOjw#tK!8nJSWHkPL`k1nlUbQVkduRpkzZI&Qba&lSU^NlPMDvOi-S{;Lz!8V
zSznS{jGdL08$_}(86exCWaTd)$ipot$ju|z%qq^!&Bo3q&c)4|Ag02t!lx?EB`U%#
zuEM9nt&*jyz^Jdt#LCLg$-=A5v64fXmxYs`m6b_RpHV?~H#-|QmpB_68yB}Y0~>?<
z|Ie(_j6Do|3{nhw3|0(&3{eaWs&>rg%8<*t&5?GsnxpNKQN<(23O<n%vK@=*7>ks;
zzQi#`IZ-2BC6@aEZf*e}{D@shR!mG*h=T({v;X!334<_(6r&bs^&SW_X+?*5OFNp2
zx<{rM9Zbx~NCaUSV>K}`HDeiBBM@yQ3laliH1Pt&0zW<mPv~6-pu3E@8Tc7Mr?N>i
z$TKK2s559Y=rb5In6v$K=wg#rlQxrflTMRvlV-DJmuBQ-lTwp1lX8<vlWLP<vt^fJ
z<YW^T6IK&W6J~+T_M{21NU}0YvM_owMlv#aGDI>kaf)Y(GjWP!iZHS9nenCZF|+ZS
z@uu-Ib8=g9GjVcQaxmG>GF)ZIlxbLL*lEaY=)lG2!DYy3sNY~>?V(?<(XYXzv4ioz
zW*vPGjU9{)4$2zZ9<wx7X)tAKRBCi;Fl&G{XfSH1fHc&r^s6wb>|i|Lps20y!K)&x
z!t_^#ahA#|6{bv;N|jC(W)-k{6-E`a9se5~I8CfQ*v%NtOpQQ19$sM&aU*pjb0cOW
z?H&IZ9E5ZYJq)!O6|@-{wFR}AK58>+!_{f4Yny8`Ypd`0$FNz~&_jL4KZng4Iv(mE
z1^+<?sjI7-t23+1a40ZxDk$lT_KPxciZWXFi_RDQFUlMz%D7kbt>|A-=4jC2kc`rz
zjH^U<i857+GJ=*+F>#4<3Nu;@^YhsA<nu7w`t$Vj?B{vU!y?DSxR&QF4^uS{qYn=w
zHxJ_~o?ARjmOPm}OdKG-K1VzU6DJ3w^-B)MevbVdOmZBIZ#fvNIT(F77*}!J0&SYM
z;9%nB07W7byA-3blq7?Af;f|HfOv!W1o02zEDyy$iZg8zXABl+6cKk3XWA;xSSG$q
zoQYjrT%3tZoPpnfKY^dwwt;^G{{wzzL4HSmric8DMf{8SnOOK;_?fQoJJ>QV<!4Oe
zXJqF$=V#*PXW%m6VzQmUb%2XWk&E#o7h@9_V+a@H6E4QBT#Rm9jN)93TnsFh&zKpR
z*#sozK|#oA;_4x<q+@Df3W^9zLu-#jQ$`2V08=JKQ$tfG0aHdsQ^tp;A5EDynI1B|
zWXe3*bg?N@lPP1eX|X9&h$*A9DdSXAMpKYkJiJ04?55(T>ZZ)5N<02DI0#F~dTdoX
zs&rL}d6^QUlJQ*~Mx7mu4h}pDY98J?(K<{vU;zdPF>x7>RXV$LPU$e~H1G*~=s0lk
zdg#dSU}D&A<mw@BXsBkggNb3YfwhMTG?AKs5~;4TwnvRgj|o$@31f^2qqhm8Ndv#I
zhY2X{nMf;ZD_biwD<2S%@=(^)cGBJPuVJ%_x`*zLe+M?pDSPPd_|LFeN)|*rY!;LR
z(G3oq+yWl#x{O@9a-iT?W#sB1r=sSlAn70(AekW9Ah|%2O<a<ZL6Xt>faC*7rj3$}
z5K(4ENkK_PNk>VRhmwqoBo9e4HA^xUNirr&GO|djNV-TeFO@thc~$bMB#W7(n`D|~
zn<NXnq`D*%x1<6aBRd<T%{4a01~$fWHpYi+jLvL~OTnT%Y?<OS#hEz78I8r|1@r~%
z1?mM@I0P7N)(gBBV45eu=r7POz{D%SD6oSu!QsD_r;k94K#sr~fj0uI+yacJ1sGQe
z>=Iy-6wnmN6kz5MVB{9yXO?GXa=gLJ2)ZtS=`S<mJ7z{7<``zC-OP-um>D^lC7GG{
znL$ghM7j7S^d<Zy;wADW`X$y&uv_n!I4|*Df_a?;qd!=TnNLDaLQjIZS7NQiS&6q2
zEItxB5;YRc+!ERnOt&RoOE7gwtdck-!R#rKDZykR!KevZ3C74R!OzZUcaxp5kDalC
zo$)O@qc=O_Dt1N=c19ldG+st_UPfbHekahuxe9^_f-JTTf)fN62tE*GVH9)}WO^w0
zQIKhoAY-s#vLF+Spo<_=s^C(=t%64#tOQxa1=R(a*aR851;HEs9Um}#U}9R%#2CQD
z=*YyliHUJB6C(pyiZOwyfC;pXo`aEt%aOtUY>`oXY<yvSVPR2GQBhp15oo{_boLL3
zV-FgB1&M%Vp^~w&pu>J3szLMOpzX#Gk+@heI~H`<57@9+H2nzE!3GwA?6N;=q-_)z
z3tC*F4PqA>X={V**9IM8r*Eun3_7o+uu!}3tWj*NwoxqnNFk65inJL8?it-N(gqPl
z=gt}#fkw9qL8il8i_8I;TND=y9#w{T3*_s#ScJPkzBYnt1*-<R5v1P82;@YgSZ$CF
zkTV3%LR=GDSfpJ95{Qk})-E!FunY|tKz>6En#RRKAfuqMAT#KuOH)NrMi3S>78Hfj
zAaM`|i$lfWa#^ynvj5gIn*2M%X!38pESQ)L5?jY;`tLNO>A!Wd5F(pV>)*A1TR<dJ
zrL64V3|U!5u$a88?7zz(VMZSiiB}1@_015siGh*-m;nO=x84#4RtA;-8yQ#`)c!wb
zU}ew(le+&~8CV&Nz-$vRY01FLz{+67z{SAIVEg|I11m!lSXVQcYyp$4V6qKNwu8xz
z|5XgE44q&Trhv(*U~(FmoDL>ufXP{4^6>xX3|wIMaDm;!1$GY?1IRsGVE1q_fZW5y
z0CEo(*gafe_i%yT!^Hq{4;KT-JzNYR_i!<Q+{48Hat{{+$UR(O_i!<Q+{48Hat{{+
z$UR&PAop-FfZW5y0CEo(1IRtx3_J`04BQNA{~s}MGiZTHBQR+KCe8l8W8h{mXW(Yw
zX0Z7Gje(oNl7XLro56-bkb#?_6Rc(mn4AhGr-8}oU~&eSoCPLlgZ0h<lXJo3JTN&Q
zY|i2Tj~I9uRQ|tV;9*ev|Am2vK?_Xk{{PIt!(aqvn}A6(Flqk(2?Gy<<^OvOJPcO<
z-!bqoSc64u{$FL_VX*!Gih+lr32a(3m}~))tzfbZOtyo`4lvmXHggJ?oC+qVfywD$
zat4^31t#Z%%|HDA3j;5M%KrllybNmpzcKJKXn{%H|7{Gs4A%djGw?Drfn}S)WDA&V
z1(R)HvK>rz{GZRj%h37%8v{Rs3WFpAKZDx;#|-=oT3}L_L56{!!RY^e27U$;FlolX
z$H32E{(lz(KZC{pmkj(2mj5?1@H1Hbzrn!IV9mhAz|Uawe**(QgDrzF13yC(*vw`y
z*#ah8!DJhlYzLDaV3nO<8>fJ6nF=PSfywD$at4^31tw>M?VSTA=Yq+3U~)d#-oyXz
zGYBx~{(r(Cz+nFW9D@LZ<^L}X0u0vwA2J9q*nmZB|9@Z*VCeY&i9vv2{{M3ff?&T3
zg8eQC_PZbh$nSz+zYBu>F314#yCB%_f?&T3g8eQC_PZe1?}A{z3xfSF2===m1IX`!
z3?RP?GJyOp$N=)YAOpznf(#(P3o?NGF314#yC4I|?}7{<zY8*e{4U4<^1C1d$nSy-
zAioPTfc!4V0P?#a1IX`!3?RP?GJyOp#Gvy39D@*p+W)5vLJV49QuqH(1|bHc|IZkN
z7)-#VIfE>N5Q8Oy0)r5P6@wUq5Q8;?2!jxV4TBPc5Q8lPCxZ|}6WE+)FxdhoTft--
zm~01=9sf5o2r+bmZJ7c#c`BHk1}3M2$r)gB7MPq5HvjPdXAH6oYX847$TDbwNh2_6
z0w&G>uV#>Cu=xLlL6*Vt|7!+W2CM&{8Dtr3{@-AbW#|N}nF1!Kg2`!Maypot0VZdG
z$=P5N=77n$U~(RqoDVkV@c*w2a$q;ef!!d-0CIyI1IP_>3?Mhif!!bnc7q()4RT;N
z$bsD;#{hDJ90SM=att6h$T5K2Ajbf5gB%0M4RQ=1H^?!7+#tsQa)TTL$PID~AUDV{
zfZU+Wpz{A2gEBZ5D}!^fGJ`Hy#OVJw24w~lFbOIHl^M+cUtv%NyGohC^8XzMWd^JN
zFBy~>tpDF+P-d|Ce~Lkw!S??>24#jOunEmzvIR`Gg2^^8*$yT<{@-Fy2IqZchACiM
zrh>_7U~)Q`oB<|hfk}{Gl)-*c2Kz-B>=$K*`Cxkw|Nq9I!od1}6N3r^7ntP!|CT|8
zf%pGG1{DT=@JeYxFewDiZ_58aGN>@9FeoypFsOl5YJo{z23ZCb1|u-r1WcMS2r#HH
znEyY(paL%cRTwP)A7W4em%1tp)(nyiDhxLNH!`Sz^PLKV!~drYDhzJ_KQpK>xPwU#
zFzNOG1A_`f5SSeTCc{8wC_@C;C6WJMGpH~`fo+TiyE6tX9t$oR<Nm*4P+>>_`!N@+
zrU~rwW-!?TCR@Q|8<=bdlN}673@QwrVD(eL<Ww*@4NOi4lQY2NEHDYGp;Q<^HIxbi
zsD@Hu0M$?`4D-RhTM2gaDloYkOs)ZwYr*6uFu4UxZUvLuz~pu?xdTk@0+YMJ{@Md3
z_kziNU~)g0JP0NagWYoktp3#hoeU}rr~mI^P+>R&4(+pGk#k^?^I(yS|KBpGGO+&t
z%Am@?1tz({B+vin45|#gU^f5%Zw#sof?!hU|7!+S23fGU9GF!4|As-8!4hl^D8H*R
zScBErfK71te~&?xA@=`g233ZH|F0NS89M%dVNhk5|Nl9I8Uy$LmkeqQ{0w3YY77GZ
zA2Fyg2!h#4p!$qK`Tu(cH3k*1h}!=*3~CHoU{aSsoI#Dj2+TGClV%KD3~CJK41x@5
z43=Qh8dSf6TN-K%wqRMi|KAwY7~KAUWKd&p2a_IP(hE!mfyods838sk^8W`0HHIj#
z8=@Kb7}OYIz#?(~pD?H~<o>_RpvKSyc3U%;Yyp$4V6qKNwu8wIFxd$<a|)Q83MQw4
z$?0Hn2AG@$CPA$lHHMX7m#hMltHI<NFu4{?ZUU2Az~ojixeZKi2a`L%<SsC|8*Jtt
zFu4~@?gNwi!G1XiCJ%!}j)2vl0LQ{fFnJmrhG#%^62mz#`#hL^@&9E8bq1FIpBU5`
zSpPp`P-ox*lidHmFsL)|{J+nj&cF{Q1;M1y|Emn@3`+lRGN>~s|NqLM&Y<%DErU9P
z?*Hcu>I`OJ5%d2~8Ppjp|G#HYXRrd3*8g8Hs5997zs{h}VEg|ggF1s9Sf#`NYYgfP
zE@0ae{y%0=XJ`VO)(j?Fz+@|!Yy*?+V6p?OYd+YtMPR*)!Q>J!xfDz;1Cz_a<O(o(
z>i-)Cb%xVmf1UY%k3pT`ELh|mSmgZwdkh*3tp7hSXfSYrN$&rz88jGp|9@uC0Jn5C
zz%5h_24%3U3WFen27}uFCkz@4T3}KaEMf#^n}A7+|F0M{80`MPXV749_<xl_gTW1~
z*Bvb40cLysf61W15Cj$hrC1Gy@c*9}G#DcOzh%&1i2VN?bOI)rjAr0u&|ruGv*W<B
zpggF-kP9}U32b{am}~))tzfbZOtyo`4hA6x4Tesz+opiYsbF#%n4AtKXMo9BU~)Ft
z{d2(NTrfEgOs)jGd=;2n4JOxs$+ch-l%6#hwt#)K6-;geliR`M4lub3OzsBzVh@<y
z3nur0$^BqI9t4wz!7e!hR(}c{7iYob#s4oEG#Oa_e`C;O;Qs%DL6d?1|9%Ed20<_>
z^#2HhCWGw%cMO^ga$r&kG^@y<3}&nRzr>))p!WYWgC>I(nAH8hfkBhO2+TGCljaP3
z44Mp<3_=W=3^oiR44Mpf|35NlGC2Id&Y;QQ^8YM@CWG7mPYjw2?qJdbOnQOIfd5Yz
zG#LWHWDr;+7|ad<vqQn`i2u(SG#R4*-(k>Xi2eVJL6f2O|7!+Kh9<Dvn!#iXm}~`;
zZD6t;Om_U=!l22}33lfcFgX=WP6LzE!Q>1uISWkA2fKd}*v*T<<PtEs6ihAylgq*6
z3NX16?9NqSay6J-118skNl^T1GHeF>VhdPgE128{Cbxsh9bj@7nA{Ea=^ik-7fkL0
zlZU}>I|5d7=Ko9vO@{Me^3wm;3|b64|GzP4G4TF>&Y;EM0A@%2f5)K35CbMbZ2&EX
z{b2UN|L+)d!RmDxK((7LxOUTJ5c+?IL6<@0|2GC*2HpQ38FU$J|G#I@WdPM;x(u;k
zT?zl+GUzfifz>pF$rdo#3MSjYWILGb`2U<im*LF+&kVW@=l_3Z&|~2K|BgYAf%pG?
z20aFT1_1^=27&(<8T1$g!E7N0AqG7LB?fK=JqG3fZyEF$R2Y;P^cd9s-(}Ea&;paX
z4AKmG3`StK377=cdU_0&3?krp7cK@p25Sb;9E=Tv7=s>zErTe79)rXGXAF7_ZvS61
z=rOp1Ne?jT1tx>QWC)lHW8h=ZV~7B|HuC=`20ezT{}&ka7^1=U#{B=rpvMpkb}gtp
z(PPN{f0#j!p$Y8HW-!?TCR@Q|8<=bdlN}7o40;TmU^h<zlT*RuG%z_GOwIt4v%uth
zu=`hnU9}2Kt_G89z~ovmxe@HUP5+-W=rL>oi);mx+rZ>@Fu4Ov?gEp$!T#C<CijBL
zePD7w*k1?1<YBPwN5JY&fMe$*m^{P4&!ER}9!y^Rf0#j^f%ShAgFXWnnB@NdjX|G*
z_kSsaJ_A34ID<Ze!2jJ0`V4|#wh#jkgFb@_gA9W{gWCVg4EhXOU=q|<)n_mQvrWJx
zsIRKeVE(_CL7&0m|2qbK2Fw3*81xyez_Oseo<4&OSk{(-k3pZo;r{~$eFnGxUl{Zm
z+`*&=nDhdZL0~cjOh*2{!Jy9&_5U`5K0`EvD1$yj%>OqG`V6rQTnzdQasOX1=rbhz
z|Hz=vko*58gFZto12=;{LlfA|&0w+xOtyl_HZa)^COa5p8T1)C!7iKvCZ~eQX<%|X
zn4AG7XMxGtV1La4lXJo3JTN&Q?5~wz*RBGStHI<NFu4{?ZUK{9!Q?hDxgAXI0F%4G
z<bJTf4uZ+UU>lEs#ZQ1^<Rq9p^}mBbpW*cXE(U#uGvJUs3l=#C7C8?Vxd;x2OAOo$
z1`L+}e=!&^*!=&&V8Af{{|^QuaQnxIf%X4O1|tS8FbQh^7%}ktf5Kn{ZvPlDfZ9Js
z3_|~}F&Hr@{lCXx1aAKrf!jYu;P#IZgW3OY3`Pu~_Ky*`{bR&n_5TBd5d*0GV+3yh
z7=hbAMhu`9tPumK{bR)7^8Yh~5d*0GV+3yh7%_m_KSm6o_Ky(*sQqKa0BZjjF@V}X
zMhqSQZ!j1!fZ9Js42!^e7lX+qU~(y#Tm~kWgUJ<O@)X!5r@<~f^ZyZp5yM%q$T_gc
z`Tvg?j2T$|e`PRc0QudRf#?4V24e>P|H~MR83e(k(0|Zc2GGccF@x^^g$%|F*8krz
z7=vp@V{pxA%;56>ErT(*bTS5)PR0x#{~s_IGk{7bV+K&^WXuo(wk48*i@}&7ih+~C
zm>~wtP5|4O3pNup?qSRT8uu_}0F8SXGl0fDj2S@V9>xryaSvmLj{j>Ij2RYzZCnf{
zmw?HoU~(ClTn;8zfJspKW6ZD$?4H$Nat)YV3nn-H|G;1jE_sX@KqZec1E}ONW&o8u
z#tfj6$Cv?B@)$Gh2fOwlm;{wA#tf&y>@)v+8H^duf!XK5?2G^3F_?f;mkBs^nSfK5
z2{?pJ!0E~aoUTm3Dar(#qD;Uk%7g*bqBa4iC=+msG6AP36L5+$0jDSvaEdYkrzjI}
ziZTJWrA-)oL1TsBv}6KKOD5p7WCBi0Cg8MW0!~XN;Iw1{PD>^XptiIL1E?)+!T@SZ
zn=pXd(k2X`wzLTYs4Z>60BTE{Fo4pM2?HoCnJ}FB|AxT?oR&-&KxxT@;r#zM45nZ|
znlkW%Z-5pAlc18*ltJnLX9iOS(8z-+xD+)7kN=p0$A3&2bQ#1LOu?f-rr=Q^QwGcb
z-xy38tQmwEOc`wczhy9G0F{=e3=aSAGnj%)KU0QS22KW3hPeOl8B7@x{(oRFWyl4)
z0aW6dGJr}vQwC6pXUYI7@k|*&C7vk*sKhg60F`*A;L#*g@Mw}Lcr?irJep(*9!)X@
zk0zOdN0Uq$=7Ze;Y6Y2s$Bs<FWsWHWsGKonxcL7KXq5c_QwDPe9tIu;a|ZtZTN%t5
z1pc1~^=ZLuq5rEG%o$|=uVFA}kOPxS3}OuC49fq{GMIzMlg+^+hvwjsLvse*|H~N6
z8H~VYnt(}j1_=gp21^EM26F~020;dM25ZoWFoO+)EQ2|?9x-Qd`G1+goWbq?Nd|KU
zcQEMzCcVI9!2cTz<_v*gG6*aZ3}%Ob*`Z)|7}$h}|MwWo86y9`VlZcj`oEsRoFV%E
zI|g%x7_dm}|3?hw;2OuAA@~1k26Kkm|63T$!Q<ZM44`pua|Y13w>bl7+}oT1H12KA
z02=o;XXyC9gTWj;ZfVXi1?;b>U~(FmoDL>ufXP{4az5C1i@>g03?`R=$)#X&8JJuS
zCRc#Tm0-WD0+Xx3<Qg!!7EEqn5M?lD*vKHyV9v1V|3(ILhRtAKZvo401(Vyr<aRK*
z15EA$le@uTvIk7=1(W;0<bH6N90Zex!EQbRR(}E<`zOKVng4qk%o)yu$&26+yYznx
zg9QVq)nNf{!CHV@uoetL|F1JxFv$M@#$dr92PQ$SSqpG$)`9^ves00w@c%Z01%u1~
z4-6LIHmwCi99Si&Z)gE-*;+7wTDBGppq8x#1E^(d!2oL6S}=fGwiXN>|L-waFf0O_
zuoz4(0h3F?<T5b1989hNlV|?FX0Tv5|Nk|EB?Bu1AA=<W7ntP!f1kmUfd@3!4^H!z
z3<CeZfktrtZw2+E{_kb5WRU&;hQX3S4ooUBNHAD3DF463V9B8J{}6*EgWCW143-R9
zU=mcnSTY#=|HWX*U<4L10h8toG7OdsmJIR?mJC)5q70VclEIR}hCz|RlELBs69!8L
zm;cWhEE(MXUuLjma0infVA2ar2K;}-V95{&CWF8t!C-a>m>mjchyDM^V95~i{{@33
zL+t;543^*&Zpo1Q|0;teL+$^s43-Q{U^h2|$rdo#3MSjYWILGb_`i<9lA#mq@+n|)
zDwv!GCZ~hR8DMf2n4Ayx#Uil#7lX+qU~(y#Tm~kWgUJ<OawXW!tH9)HFu4Xyt_73p
zz_WB4859^S88-bt%V5c{8SJ|)VA-u;avPZ34kmYi$z5P_H`wocz~o*qxerVp2D|eJ
zSj`D={GJ4prx=78EE!IN<Mhn`Z48zSXTc)pz#`|tA{W7-cj^CE1}g?u243*ICWz$z
z|AE1Zf&c$)P+0~hh5lb;umX=vS}}k|CaoBh7z7xsz&$A|29^Ks7_1o7z`C@+r0)MC
z3|0(AV73XEG-nWIuwt-e5M!`numa5vg6D>;7;G3M7_7j##)`q^|6>L#2DksO7_1oF
z!K4S6^!opd!HOZ^|7!*-hCnbG1QrhlvqQk_P%t~}|9b{2@K}u%xc0FE*E?1WO<)%`
zgUJ>!*$O7xz+^j^?D&74!HS_1?Aj?{aw?dd1}3M2$r)gB7MPq5cKIT(n-_!0C17$X
zm|O-XmxIX_U~(ncovXm)YB0G5Os)l!8^Lo<o4`KZ4ED<wu=rLmxeZKi2a`L%<SsA?
z8nLlr0FBsKF@Q#FtQbHeHdYLW!7e-kR&xq$@@WQ11}la$|8Fr^F`Na9oCAxT2aAAe
zNoxk~|IZn$8F>Fc1J_=>4Au++|DP~eGYB$(*g_0k4Au<FU|AIgX$ETswf`>|tQoYx
zq%MO9gEfN@m~8?k&B3}rBgWPY*5J7`8wOzpYX)1eIH=ce&EWR`9fLK4JDBtUlU`sl
z2uy~6$%y~o7_1p0|9@q$2KTV68KS}F#QgusV9gK*c2DmAiwxEbO<-3wgUJ>!*$O7x
zz+^j^>|l^#ux984yKoAaoC+qVfywD$at4^31t#Z%?OF+T$tp0p8cePMlWW1`rvHx_
ztQod|*;~QnHZZvzOzr@ayTIgbuwV9o$-Q85ADG+^_RB#qc^GW(5wQ9b;P5{QCeMJw
z|2&wy`2Qk<4FmW8YYa9FJm48h&}f?txK(2V9&@u{5c<Ck)c*K?i@}CL4ooUBNHW+k
zDE~jkU;|$5V8fvH|2l&WgBF<7{ol)A!(j0L2ZIfR5m>|oOqw&uG1xFzGAJ_GFjz5&
zGuSX#Gl(+SfJ|brVXy_SIRK4;*f6+&)w}&a&tSvg4kkUoq!*Y3wQy{}EgTz$Ag~Ci
zg=51I0%n6+I5rGn|KBj!Fo0S(HVo1KKQh=b#QuN6V8ami{}qD`L+<}A3^ok4{~s{e
zfY)8vfY)8vfY)8vfY)8vfY)8vfY)8vFm(Liz+l7B33k;KFgX=WP6LzE!Q>1uISWkA
z2hA5UECRC^gUKaeaw(Wx1}2w-$rWI7CD`Swz~pK$xdu$G1(O@WBbA%}Z(^_kw`6P>
zwt&UAg2`=QayyvZ0Va2WNl;71h5^))v0(tUWNa8fEg2hz!(bbafYpFnFE$LI){70p
zDRB6oW)NhsVK@V}>nvCVG=Fcya2_mj5gcxp{yzZ4*8kTGw&3w|TLyvuM;L6uqv*Ea
zQFL1d&?vesgWCT)47LnfU{aR>G;?nRW}ARXi~pY(Y{9JzTX5>L1&^rPf=ASC!6WLn
z44@HpTL#dGx-A1}MBNrVqHfCo8d0}p2><_?!4_Of+cHG{f5~9W5cU5BgDnGSMBSDl
z1}qZy|1N_qc+A9>A@~0|23rQuh`KEUXhhwX0W_j+%K#ctw`Bm0sM|7tM$~N?KqKn5
z44q(iP63ls!Q?bBIUP*S0F$%8<ZQ5C=77n$U~(RqoDW*9!T=gqw`Bm0tJ^Yw#?@^Z
zK;!DR44`p!TZWAcTnx4hpniufcy!&C0W`X9%K#c(w`Bm0uG=zzM%Qf_K%?un44|^w
z7F<@_g3D@Ka9M53upjK-gJAM7*j1p>bz6oL;Mh3{CNKU!$6&{x^8X2g9fR8cs|<Dw
zT3`~iqQj2C2+TGClV<;)G1xJf|G&>*$6)dQ7K0sw<^RVFb_`bkZ!_32SpUDnV8>wd
z|0#nVgYEz840a3-3;_&w3{7B@o55rYm}~`;ZD6t;Om={Eb%Je~0w$+|$!TD6I+&aR
zCTD@k*<jn}fXTUFavqqR4>srU|EmlR;Q4(A@ch05188R70X(zsz+m<N8-oLbHJAjo
zs~x~I@($n`c?Sm2jJyK_Xhz<F0W>4;zyO+&cVGa`$U88A=G<KwSpVN<aAn{ElRW>Q
zGPp7b{eQ*a%AoZB34<$x!~aJNt_+d?KQp*8ME(E5;K~sF|2=~%Lkw6X_Wv6OSB8ZD
zuNhn!_Jc(Zg2_|=?=!eEoCcF;!Q{FB_Zi$6_!+nu+!zG@KVWcU5CpS9v+Zu+F+(>7
z&<wj9c!u4LA(DZc!HppbtR|X4fWeI+1}qZy|22afL+<~_3~mgd*=jci&}_9E18BC|
zjR7=U?ZyC_t#)Go%~rcH><8;Q2qr=E(QXVUz_y<RlNbL#X7FU-VNhrAWZ-4sX7FU-
z|9^(TlR@DBT?S7EK`<LMZsy4VYV&(CIDl0~G6*wxGDI;5F?cdWGw?HbGQ@yI62P*#
z|KBorGSo6~GI%nqV~}I;WZ1}{!QjcT>Hm8MPlo+ql?TD(39v0E!Q^QM5e83&GyhLB
zcru&=v(JOspqX(`hD!{b4Bp_F@&?D0H+YQ8n}PTLYX)xyrT<SEycrz+KW6Y|i2VPN
z!J8rK|7QkohM52F8N9)<<IS)i%svPvPyK(y;LUIvOr8ak=l(xp@MZ92;AHS)5cvO;
z!H+?Rft$gPLHYk120sRe|IZox7~K9pWAI~e2a_IP((C_Y20w-%Fc|_C0kt>$7(iq4
zehj()?=ko>tOSd%0+Xx3<Qg!!7EEpe+qDI3!d5W34NPtalRLoVE--lnOr8L%KM5u;
z{=df%z`*+dAwvKI7nlUC76@SA{r`#~0GxUQz^ONY0W^mmz!3HS6GH%a7A623CIR4C
zm;eUQEKC5yDX^~7|DQ1gFq{Rm&;5VK5C}Fu5Nv)R1IYY9u=#;t^8>-=2ZGHH1e+fS
zHa`$-ejwQVK(P6NVDkeRK;{R6%@1S%nIFW!`u_n#5Ca#O1m(UU2HyW~7=jpt{=Z}h
zVo>`3h#?4UOArHS9wZ1Hvq9iI7{n0s{}V$H11Jv$fo%+8*bf#t2qr;33Sv0@|0zQd
z*v24+bN`<*1cPl12HP0S0J1R{Y-2Du*9U|B6%4j97;IxOIM)Y*Z43t67!2;M2ZL=4
z2HP0S0J1R{?8jiRjlp0WgTXe2F!(aaF@!SkGVm~jGVuRD$q))I0YVuB!EB-bml#4B
zl>T332xV~i|AHZu0W?z($`HvQ$`HyB#URWO$^a?>LK$MfBDw!xGlVj1`u~<8lwm(u
z<RF*?<?B!eP`(ajIL*Mv5Xx}o{~3l*hI3%{c`*Cp|JMxR;5t2=LE!&QhH!A59u8jf
z6AoVU6Aqpg4QKH9|BWG>!3#`+W<tZkt_o*}W)NfuX8@H#;S9O|pD=_ofY#N7Gpquu
zTn#4IfXTIBa?}4W4B-r**~V}N&}?Hk18BA}oB=f37|sBiZ474s%{GQJfaVj!8BTy*
zbrMWo{Qra@l7W|jlOd9U|NmWvNCrVLDfIsVLnL@5LnL@kEfTz*BogfMNCwb+ZX|=(
z|IZAO3?P?BGJs}pBN-wYgcu?jq8J1jA{js{Ng^3yz#^bE9+3>7Inzi6(41)`18B}P
zk^wYl8VN47BEjyD1iL>H?EXlw`y;{bj|96vl3_pCw1Z$0<d;Z>(+oTekql@4-(iSk
zI0t5*|9^)e3hbjO2LAsKK&}CkLjRvIL@_A+zsnE>_E8i#E}|GfGe=Qimq&rrGDLx0
z9tC!J6gcFg7&iU?#1O>*nvscO0L{omF@R=dq8LCkGEoem8JQ>s(2PtJ*o9FHAQwh4
zfaYSNz%Gnp0L>{xfn6BI0GdsR2B(K;@LWJNcrGBC0W?z?4W0{#2G0dVGk|6)qrvGR
z8k`=Y!L?E}18A&1ngKLd7|j5hD~x6U%@sy7faVIL88-cY!4M4|tB(ec)klNJ>Z8G9
z_0iz5`e^W2eKZ4Td_I~1R71srXMSTD-2T61h-Gm9|Arx!!2`_p{Qrd^mci@)D~4Ex
zAh0;7Cm0KEH^wr6=1*f8Kr4k}!MQb-A@~1%hFFG`V7;rr<Z3Xv228F6lbim(V~Azg
z0yb?cnA`>?w}Z(YU~(6j-2MMELo5Sm9y68!G>;j}0Gh{)WjF$sz4-q=LmY$3|1S)2
z3~K+MF~ot_X2&t;f<-_pwBr~+E41Sn%>KV-hy$;QjbpI=|BfM!A%P))A&#L5tg;zQ
zwt&f2Fxdts+rebV|E~;j44q)}L94jq7(lDI;}}4zxZ@Z=tGMGBK&!ap7(lDI<G?Fm
z<G?Fm<G?Fm<G?Fm;}}5ew-Xpt{=a5OV9*7Vpf!^T4CeozGbAur{Qt_3z+eexTm65_
zkicO5|1m=XgU$aJ3<=;hlL-t>U|r2%vIR`Gg2^^8*$yT_tF;ptW`j+e119H!$$4OM
z{{I&Y$qeTI*D@r7ZA=EYNRt_?{=a5OX0Z8xks+DE_WxIgWUvXz3?LJd89*i^Gk{D;
zW|;r~B10~N`Txxfx!`rnxePY{?=$2w%>RF%p%%P4y_P}u|22kMaG2LJnEzkJP|INX
z{|ZAb*p^xb>;GpNY8h<)Z(^uru>F6Pp%%Ofy_NyA3cZ#Av<khJ0kjIemI1U1y_NyA
z3cZ%018l<l|C<;Z89-}P8X467-)3lJ&;paX{|_-VG8lo`CScO+|3ijG2J`<D85$WZ
z{$F8eWU%~yk)e^n>i<oKMh5HuyBQi8Z2s?KXk@VczlWg_ytbr~0kpQHkpZ-}q>%x%
zwxp2(w6>&?0kpQHk)h-NPKHK?PO$k?z~odgISous2a_|v<SZ~b8*J|!FgX`Y&I6P4
z!S){hf19BRydJd)ydJfQ0kj^q3A`S)3A`Q^)XS1$Xkq}ZM{NSn3N?Y(qc(xpqc$;s
z)}uBtfYzfnf!CupF@V;iHZg$Kqc(xpqc(xpqc(xpqc(xpqc(xpqc(xpqc(xpqc(xd
zq9$;8*aTjW+Qa}_kJ<!oEjNK%%S{ZR^{7n@VW3s};Pt3Y450O>P2lyYO$^ZtAa{b+
zqc$;s)}uBtfYzfnf!CupF@V~uO$?y*s7(x@^{7n@p!KLt450O>O$?y*s7(x@^{7n@
z9SlkgP2lyYO$?y*s7(x@^{7n@p!KLt450O>O$?y*s7(x@^{7n@p!KLt450O>P2lyY
zP2lyYP2lyYP2keIi2<}8wTS_=9<_-9v>vsI0kj^qi2<}8wF%t9ZvwaQo4_snCU6VC
z3EaYO0=Mv+7(nY$n;1arQJWY*>rtB+K<iPP7(nY$n;1arQJWY*>rtB+K<)n~2GDxc
zCh&UHCh&UHCI--Y)FuYddekNc(0bG+22iWJ865k~;Mi{l$9^+7_M5@659(b@F*Ji?
zzZo3+&EVK?2FHFgIQE;tvEK}i{bq3NH-lrp865k~;Mi{l$9^+7_M5@6-wclZW^n8`
zgJZuL9Q)1S*lz~Mels}so58W)437O~aO^jOW4{?3`_16kZwAMHGXp60o58W)437O~
zaO^jOW4{?3`_16kZwAMHGdT8}!Li>Aj{RnE>^FmBzZo3+&EVK?2FHFgIQE;tvEK}i
z{bq3NH-lrp865k~;Mi{l$9^+7_M5@6-wclZW^n8`gJZuL9Q)1S*lz~Mels}so58W)
z437O~aO^jOW4{?3`_16kZwAMHGdT8}!Li>Aj{RnE>^FmBzZo3+&EVK?2FHFgIQE;t
zvEK}i{T6WSw}4~61swY=;Mi{g$3AFOMT(&X9Q!Tc*lz*HehWDETfnj30*?I_aO}5$
zW4{F)`z_$uZvn@C3pn;$z_H%~j{O#J?6-hpzXcroE#TO10mps|IQCn>vEKrY{T6WS
zw}4~61swY=;Mi{g$9@Yq_FKTQ-vW;P7I5shfMdS}9Q!Tc*lz*HehWDETfnj30*?I_
zaO}5$W4{F)`z_$uZvn@C3pn;$z_H%~j{O#J?6-hpzXcroE#TO10mps|IQCn>vEKrY
z{T6WSw}4~61swY=;Mi{g$9@Yq_FKTQ-vW;P7I5shfMdS}9Q!Tc*lz*HehWDETfnj3
z0*?I_aO}5$W4{F)`z_$uZvn@C3pn;$z_H%~j{O#J?6-nrzZD$&t>D;i1;>6XIQBuK
zX;KWW;Mi{k$9^j~_FKWR-wKZXR&eaMf@8lG9Q&={*lz{Lek(ZkTfwp43Xc6&aO}5&
zW4{#~`>o*EZw1GGD>(LB!Li>8j{R0}?6-nrzZD$&t>D;i1;>6XIQCn?vEK@g{Z?@7
zw}NB86&(Am;Mi{k$9^j~_FKWR-wKZXR&eaMf@8lG9Q&={*lz{Lek(ZkTfwp43Xc6&
zaO}5&W4{#~`>o*EZw1GGD>(LB!Li>8j{R0}?6-nrzZD$&t>D;i1;>6XIQCn?vEK@g
z{Z?@7w}NB86&(Am;Mi{k$9^j~_FKWR-wKZXR&eaMf@8lG9Q&={*lz{Lek(ZkTfwp4
z3Xc6&aO}5&W4{d?`)%OZZv)4E8#wmcz_AY+MU`S`1IK<FIQHAXvEK%c{Wfsyw}E57
z4IKMz;Mi{i$9@|)_S?X*-v*BTHgN2>fn&c79Q$qH*lz>Jej7OU+rY8k29EtUaO}5%
zW4{d?`)%OZZv)4E8#wmcz_H&3j{P=p?6-kqzYQGwZQ$5%1IK<FIQHAXvEK%c{Wfsy
zw}E574IKMz;Mi{i$9@|)_S?X*-v*BTHgN2>fn&c79Q$qH*lz>Jej7OU+rY8k29EtU
zaO}5%W4{d?`)%OZZv)4E8#wmcz_H&3j{P=p?6-kqzYQGwZQ$5%1IK<FIQHAXvEK%c
z{Wfsyw}E574IKMz;Mi{i$9@|)_S?X*-v*BTHgN2>fn&c79Q*Cy*l!2Nemgk!+rhEl
z4vu}$=(!X_J2>{+!Li>Cj{SCU?6-qsza1R=?cmsN2giOpIQHAYvEL4k{dREdw}WH9
z9US}Z;Mi{m$9_9F_S?a+-wuxbc5v*sgJZuP9Q*Cy*l!2Nemgk!+rhEl4vzhHaO}5(
zW4|37`|aS^ZwJSIJ2>{+!Li>Cj{SCU?6-qsza1R=?cmsN2giOpIQHAYvEL4k{dREd
zw}WH99US}Z;Mi{m$9_9F_S?a+-wuxbc5v*sgJZuP9Q*Cy*l!2Nemgk!+rhEl4vzhH
zaO}5(W4|37`|aS^ZwJSIJ2>{+!Li>Cj{SCU?6-qsza1R=?cmsN2giOpIQHAYvEL4k
z{dREdw}WH99US}Z;Mnf~@1*De@1*Ep0PUpc0Pm#e0FN4VfX9nE7(hEII>2*49pIf5
z9pIf59Sop3pbiGmPKpi&&`ydD2GCB54)DBB2YBA613d530iO5i0MGk$faiTW!1F#G
z;CY`8@J@;j@Vrk4cqc^%L)8E03?1NkpAPU&iVpB7YX^8YQwMk_MF)7^rvp6i(*d6M
z=>X6Bbb#l5Iv7AHw*x%y(*fQ|(ZK-PNzuUo+DXyD0NP2>!2sGx(ZK-PNzuUo+DXyD
z0NP2>!2sGx(ZK-PNzuUo+DXv?p7H4b&-iqJXM8%qGd>;Q8J`aDTu%phuBQV$*V6%>
z>*)Z`^>l#edOE;!Jsse=o(}L_PX~Cervp6K)4>4RNzuUo+DXyD0NP2>!2p`;>0kiu
zr08G(?WE{n0PUpc1n=JJ1n=JJ1n=JJ1n=JJWB~2n>ICoJ>ICoJ>SO@z-s%MJ-s)t~
zWe{iR1n=JJWB~2n>SQou;9}?mt#o7P1g-sH=mgF1GjxJ?Z*_uqZ*_uKDt3ZL+d9F!
zw>lX>ySF+SK)bg(89=+YI>9m6$pG5D)d?OM?ga1N>I9Emb%J+qb%MuaI>EcQI>EcQ
zIvGH_w>lX>ySF+SK)bg(89=+YIvGH_w>lX>ySF+SK)bg(89=+YIvGH_w>lX>ySF+S
zK)bg(89=+YIvGH_w>lX>ySF+SKqGsd44~axoeZGeTb&G`-CLaupxs-Y;5h1J0PWuD
zWB~2n>SO@z-s)rk?cVBS0PWuDWB~2n>SO@z-s)rk?cVBS*!%wtLni}h_f{tZX!lkp
z18DbFCj)5rRwn~!_f{tZX!lkp18DbFCj)5rRwn~!_f{u(_f{tZX!lkp18DbF7dWqV
zf%94yIIneq^I8`;uXTa*S{FF4b%FC*7dWqVf%94yIIneq^I8`;uXTa*S{FF4b%FC*
z7dWqVf%94yIIneq^I8`;uXTZAstcUgy1;p@3!K-wz<I3;oY%U*d94ea*Sf%YtqYvj
zy1;p@3!K-wz<I3;oY%U*d94ea*Sf%YtqYvjy1;p@3!K-wz<I3;oY%U*d94ea*Sf%Y
ztqYvjy1;p@3!K-wz<I3;oY%U*d94ea*Sf%YtqUBFUEsXd1<q?-;Jnra&TC!Zyw(NI
zYhB>H)&<UMUEsXd1<q?-;Jnra&TC!Zyw(NIYhB>H)&<UMUEsXd4UVsFaC~)x<EtAS
zU)|vN>ITPGH#ok!!SU4%j<0TTe0784s~a3&-Qf7@2FF)7IKH~U@zo8EuWoRBb%W!p
z8ysKV;P~nW$5%HvzPiEj)eVlXZg6~cgX60k9ADkw`0577S2sAmy20_)4UVsFaC~)x
z<EtASU)|vN>ITPGH#ok!!SU4%j<0TTe0784s~a3&-Qf7@2FF)7IKH~U@zo8EuWoRB
zb%W!p8ysKV;P~nW$5%HvzPiEj)eVlXZg6~cgX60k9ADkw`0577S2sAmy20_)4UVsF
zaC~)x<EtASU)|vN>ITPGH#ok!!SU4tj;|hYeD#3is|OrkJ>dB20moMlIKFzo@zn#4
zuO4uG^?>862OM8L;P~nR$5#(HzIwp%)dP;N9&mj1fa9wN9A7=)`04@2R}VP8dcg73
z1CFmAaD4TE<EsZ8Up?UX>H)`B4>-Ph!12`sj;|hYeD#3is|OrkJ>dB20moMlIKFzo
z@zn#4uO4uG^?>862OM8L;P~nR$5#(HzIwp%)dP;N9&mj1fa9wN9A7=)`04@2R}VP8
zdcg731CFmAaD4TE<EsZ8Up?UX>H)`B4>-Ph!12`sj;|hYeD#3is|OrkJ>dB20moMl
zIKFzo@zo2CuU>F`^@8K87aU)`;P~nV$5$^nzIws&)eDZVUT}Q%g5#?f9ACZQ`0546
zS1&ledcpD43y!Z|aD4TG<Es}OU%lY?>IKJFFF3w>!SU4#j;~&DeD#9ks}~$!z2Nxj
z1;<w}IKFzp@zo2CuU>F`^@8K87aU)`;P~nV$5$^nzIws&)eDZVUT}Q%g5#?f9ACZQ
z`0546S1&ledcpD43y!Z|aD4TG<Es}OU%lY?>IKJFFF3w>!SU4#j;~&DeD#9ks}~$!
zz2Nxj1;<w}IKFzp@zo2CuU>F`^@8K87aU)`;P~nV$5$^nzIws&H3_^wViI`w@g(r>
z<4NGX1CzkJk0*h5NlXIol9<E*+I>6;JZCovyjNlp18Dd0B=BB|N#NbblfZi=CV}@#
zOkx1-KAr^LD=`VYS7H))uf!zq?&C?|-8YlKJ0>Q9ci&6`@0geb-Z3!=yklY#c*n#f
z@Q#T|;2jf_7(ly^CxLfNOakwim;~N2F$uh5ViI`A#3b;JiAmrc6O+I@CMGd}b{|h-
zIQ{<%!z2dK?&C?|-N%y{K)a78F@Sa-PXUL+6!1QgDGZ=pNK?S!Fa^8|X$m+Trhvm?
z3V0XN6!4yrDd2FJ0uF~M;Bc4%-ZL@<91c^!;V=cfXJiUE9HxN7VG1}Lrhr>wQ^4Uc
z1-uJs3OF35fWu)5I2@*c!(j?I9HxN7VG1}Lrhs>jOaX_(6b8^Pq$v!bT}V?HK)aBp
zFo1R;O<@4-LYl$=+J!WQ0kjKg3V7$p6!6ZGDc~8yDd3$WQ@}e%rhw-Sr-H*_DmWab
zg2Q1dI2@*e!(l2o9HxT9VJbKrrh>y^DmWabg2Q1dI2@*e!(l2o9HxT9VJbKrrh>y^
zDmWabg2Q1dI2@*e!(l2o9HxT9VJbKrrh>y^DmWabg2Q1dI2@*e!(l2o9HxT9VJbKr
zrh>y^DmWabg2Q1dI2@*e!(l2o9HxT9VJbKrrh>y^DmWabg2Q1dI2@*d!(kdY9HxQ8
zVH!9brh&s@8aN!Lfx}@MI2@*d!(kdY9HxQ8VH!9brh&s@8aN!Lfx}@MI2@*d!(kdY
z9HxQ8VH!9brh&s@8aN!Lfx}@MI2@*d!(kdY9HxQ8VH!9brh&s@8aN!Lfx}@MI2@*d
z!(kdY9HxQ8VH!9brh&s@8aN!Lfx}@MI2@*d!(kdY9HxQ8VH!9brh~&_IyfArgTrAu
zI2@*f!(lo&9HxWAVLCV*rh~&_IyfArgTrAuI2@*f!(lo&9HxWAVLCV*rh~&_IyfAr
zgTrAuI2@*f!(lo&9HxWAVLCV*rh~&_IyfArgTrAuI2@*f!(lo&9HxWAVLCV*rh~&_
zIyfArgTrAuI2@*f!(lo&9HxWAVLCV*rh~&_IyfArgTrAuI2>kx!(j$E9A<#SVFoxH
zW`M(C1~?pMfWu)1I2>kx!(j$E9A<#SVFoxHW`M(C1~?pMfWu)1I2>kx!(j$E9A<#S
zVFoxHW`M(C1~?pMfWu)1I2>kx!(j$E9A<#SVFoxHW`M(C1~?pMfWu)1I2>kx!(j$E
z9A<#SVFoxHW`M(C1~?pMfWu)1I2>kx!(j$E9A<#SVFoxHW`V<D7C0Pcfx}@II2>ky
z!(kRU9A<&TVHP+XW`V<D7C0Pcfx}@II2>ky!(kRU9A<&TVHP+XW`V<D7C0Pcfx}@I
zI2>ky!(kRU9A<&TVHP+XW`V<D7C0Pcfx}@II2>ky!(kRU9A<&TVHP+XW`V<D7C0Pc
zfx}@II2>ky!(kRU9A<&TVHP+XW`V<D7C0Pcfx}@II2>k!cMQ!2?--g5-Z3;AyklrK
zc*oFe@Q$I`;QjBj!8?X#gLe$g2Jd~C4c;*{8@vW-Hh9O-Y;ZedHh9O-Z19eu+29>R
zv%x!tW`lPO%?9rnnhoADG#k8QXf^|A$IxsB(2k+m44@rDvl&1;hGsK>b_~sC0PPr>
z%>ddlG#k8QXb#xTbHHw%19tNqu$$+A-8={E<~d+D&jGu64%p3ez;2!ccJmyto9BSt
zJO}LNIbb)>0lRq)*v)gmZk_{n^Bl07=YZWj2khoKU^mYJyLk@S&2zwRo&$FC9I%_`
zfZaR??B=;(H_rvTc`n$^bHQ$&3wHBdu$$+C-8>iU=DA=u&jq`AF4)a;!ET-lcJo}Y
zo9BYvJQwWdxnMWX1-p4J*v)gnZk`Kv^IWi-=Yril7wqP_U^mYNyLm3y&2zzSo(p#K
zT(Fzxf!#b0?B;o3H_rpRc^=r!^T2MN2X^y3u$$+B-8>KM=6PT@&jY)89@x$Ez;2!g
zcJn;2o9BVuJP+*Vd0;os1G{-1*v<36Zk`8r^E|Md=Yick5A5c7U^mYLyLlei&GW!+
zo)6w%09pwx#4sOxn#X+b{(|}7{RQ*E`wQkX$o{{@Fdw|X0Cb9(B*T30{(|}7{RQ*E
z`wQlS_ZQ4(0PQcB58hueAH2U{J_Bff!F<r3ONRO2{RQ*E`wQlS_ZQ3u?=P4STCvVB
zp8>SLU_N+%!F&eL{(|}7{RQ(GK>G{kGl2FN%x3`YFPIPBUoaoMzhFKCXn(<c@cx4N
z450l5^TGQI=7aYa%m?o;m=E4xFdw|XU_N+%!F=%kg8AV61@poC3+99O7t9CmFPIPB
zUoaoMzhFLif5Ck4{(|`op#25&89@6B<}-ly7tCh>?Jt<m0NP(Lp8>SLU_K~p80Lfb
z7tCh>?Jt<m0NP(Lp8>SLU_JwAf5ChP(Eft?450l5^BF+<3+6L`_7}`&0PQcB&j8wA
zFrNXmzhFKCXn(<c2GIV3`QZHp^TGQI<}-ly7tCh>?Jt<m0NP(Lp8>SLU_JwAf5ChP
z(Eft?;Qa;j!TSs5gZCHA2k$SK&j8wAFrNXmzhFLif5Ck4{(|`op#25&!TSs5Gl2FN
z%x3`YFPIPBUof8mw7+0J189H20&rchfPwY@eTD@LTws#t|4oJk4E+C(Ff3pY1e2hB
zlncPMzyb!9|4$hfFzEh2!LR^)lG_6C$#M%Atp8tSSioTO{~E&r2HXD^85S_u{r|wQ
z09@uTU~u{WkzoNt0$6Vom}~}<Enu=0Otyi^b}$Lr+qHmUKG?KHV7-gM<PtEs6ihAy
zlgq*63NU%<|8s@~45z{Np80>AVFANgu*f;E$oc=r85T0I{=dPnkbw(Kg4Ufb1fM{%
zkO6dZ*Fpw||F0PqGDQA=!mtqB-(1KL17?HPmM>)34`v?(lc)YaWLOAZy}Xd&ESL>i
zy}S~<wtOXcW$8)=(Ax5q;5n3)450HUR)XhER)XhER)W`-uLQ3xU&#PkS-KLuwtOXc
z72itmD!!HAIf#|uIf#|uIf#|uIf#|uIf#|uIf#|uwdE@rKx@laf@c|4g4dR>1g}9|
z30}Fki2-y{*(UH<^d<%e2403u3<(Up44c90iZ_GT6>kQwE8YxVSG*a#{&q8X{q1H3
z(7NT#;C02D!Rv}QgIDow2CpmL3|?2f8NB9pGk9I`X7HNZ&ER##TfnP)w}4mqZUL|I
z-2z_ay9K<icnf%4@fNTxTfnP)w}96bZvn6J-2z_ay9K<icnjFZEexP_#aqCue7AsY
z+yY+Zy9I3H7Vs+Htza9sf^FOiws9-i#;sr*w}Nfl3bt`8*v7438@GaO+zPgFE7-=Z
zU>moBZQKgBaVyxytza9sf^FOiws9NS#%*95w}EZk2DWh<*v4&O8@GXN+y=IB8`#Ee
zU>moAZQKU7aU0miZD1R>fo<Fdws9NS#%*95w}EZk4z_VS*v9Q(8@GdP+zz&JJJ`nU
zU>moCZQKsFaXZ+??O+?XgKgXnwsAYy#_eDmw}Wlm4z_VS*v9Q(8+U+h+yS<62iV3P
zU>kRUZQKF2aR=DO9bg-GfNk6Xws8m8#vNcAcYtl&0k&}m*v1`T8+U+h+yS<62iV44
zU>kRVZQKR6aTnOeU0@q`fo<Fcws9BO#$8|=cY$r(1-5Y)*v4I88+U<i+y%CA7ud#K
zU>kRVZQKR6@c_7FH~=mg4uEYu0JiY}ct!dFaE>?tw($Vi#slCL=?B0z9st{T09@lA
z0NZ!~Y~um2jRzP`{lCL-fZ;TlJPRhz{lCL-7(C8)7(C8)7(C8)7(C8)7(C8)7(C8)
z7(C8)7(C8)7(C8)7(C8)7(C8)7(C8)7(C8)7(C8)7(C8)7(C8)7(C8)7(C8)7(C8)
z7(C8)7(C8)7(C8)7(C8)7(C8)7(C8)7+mfh29L8H29L8H29L8H2Df1jgU8tpgU8tp
zgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tp
zgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tp
zgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tpgU8tp
zGl1s!PJqwjJpn$C_XK!d>k06AyeGit@ty#;08fC=<2?aBkM|^4{YkL;lVJ5H!Rk+f
z)t>~bKM7WU60H6dgUbJ{45t{>{=a58#h?WyL2a;83`StK37E8G5N0?9+UL!1iou3~
zm*Esc6IfR>m}~))tzfbZOtyo`j{h|brx-fHCQJd7Q^DjkFgYDe&H$6Mz~p?e$%p^H
zW;ny3!XU+Ph5>Z$_Zjdx+h@S13ZDU|y)z6Z{~t1(0k11K!(je@FT)uIi~nyK&M;X1
z-^Or;!Rr4#hBFM-3_J{H7;OG;W;nwDI)m#BLlfA{W-!?TCR@Q|8<=bdlN}7A3}+ZX
zr-GjWr=K(6^m7KBe$If?&lzy~IRj2VXBcLK?VSTA=Yq+3U~)d#UeN6AS#SuS1&8oi
za0s6Tw{p*dL-;HMD1<?~{Dm3Lf<yQ$IE2rFL-;HMD1^^4fI|2z11N;gGJrz(ECVQn
z&oY2Q_$)Yt&oY2Q_$&h`gwHa7Lij8LD1^^4fI|2z11N;gGJrz(ECVQn&x1qwJUE2U
zgG2Z{IE2rGL-;&6gwKOR_&hj-&x1qwJUE2UgG2Z{IE2rGL-;&6gwKOR_&ftBgwHd8
zLijubD1^^5fI|2@11N;gGk`+)JUE2UgG2Z{IE2rGL-;&6gwKOR_&hj-&x1qwJOe0%
z&oh8R_&ftBgwHd8Lijv5?_310`o0KG?H9qTzAu7TeP3b#ok@NPyz2WBc-8kM@T%`i
z;8ow3z^lG5fmeNB0<ZeM1YY%h3B2n25_r}3B?i!{?@QoS-<QCvzAu4SeP05v`o6>f
zTJ_Dqz`&r-WW~S;VlXf;crr|ZvKblJ7~VkHObqOd%uqHngEXTvl+D7x#+VCbvoi27
z_CeWf41A0`p=@>r2PQ8ln}dO$X*HD1$-vKa8_MQpkYZMZvUwP!n2Vrn0R}ea*-*A1
z13U93C|ihujrjxvGXo<FGXoO?5A%62n+?pq3ud!3Ffj-)zXr2GcU4bfV1Tk28F(1(
zLD@_Ud<>tVY-R>CMs6sZg@K1L9?E8AkYFr<ve_797-vJ-><kTz5I1l#a4^U*bwkBD
z8RVFDL)qL6CQL9l@GzJ#+d#zy7<ia_p=?10KITPGwh#jk^DZ!(3+$J_U^X|%FDwwZ
z@qolxWWnMB3``7sEZShUFar~V5Q{aKEdo{py1AE;MHH+z7A!6X7S9K>Wx?!PFk2UF
z4%q!1V0&4>e&m3tUkOsf0}<a2W<%U?l);%HpP_)ElA(wplOdfUgQ0{$fx(EufWeT#
zfI)%5nIVxOk0GBSk0FyGnIVxOhe3hChoOWag+Y%&0n7%8r!wR*6f>kUq%bHjlrrQo
zq%fp16fr0;lrUs4q%tTlgfb+7#Y-4U859`Yz~+JM@d29((wz)eXUL$(V8FoOoL^8`
zl$oAUqF`iTXrSPnn3tcInVguT;8T*Kr=Z}InVgzeoSLFgnwOGVq)?KPst}q~npaY)
z;Fh0P0+CKmRWQ^uU_h~%0iue5A&4QBA)TR=A%`K6p@@MYC^fw_C$R`su>zVZ1%@!N
zr-~Ue!OmA;FksMQFb6poVyptx7=^IZqT<Z_JOu+ib6gI<X%ud~x@hi!dPWy&E4t%B
zu?C6_Z1D&3G$>Yzz`>BoP|Q#Q_8KJqN*Ib562UQ;3l5%ahGGT<hJ1!J0;vJ&Ace%d
z6r>nSEJ{@<N=?r!E=es)O;IQ*N=!-3O)SbTR>)67QU;1x1_l>!0xN-ruP;L)Ln1>u
zIFW%89D_?@PDy^Af^TACIw&QOt{#-QOBf0mtQhne^cl**P>&%6$ySh;L3ZkaFM`Wu
z&}U%CC@Cqh($_C9FV{-}J0LePF<mb?KNlp&P{Ke}1CcWdG(d?<u5iVE4EYR23?P5x
zFff3#E`k@#Py)`*ARiYoq%bfrpsGgjKp8y~k9-b8Dgy%pvU#9Hh0nwshCGH!ye2X*
zI5HG56hRA$L;`9NZUy-hmB#?Oa~50`{-4I+4`!efx4<{OFflMQurRPPuraVRa4>K(
za4~>xGvNi_k|Dq#$RNZZ%pk%b${@xd&LF`c$solb%^<@d%OJ-f&!E7d$e_fa%%H-c
z%Am%e&Y%H06PQ7pL5D$?L61QnT!I=g7&DkKm@=3#m@`;_&z-UcpG0EEV9(&d;K<;_
z;LPB{;L6~};LhN|;K|^{;LYH};LG60;Li}i5Xcb35X=z55Xun75Y7+*-aQ)45W^75
z5XTVDkid}0$i%Rip@pH9VK2iph8GNN41EmU409P)GAv<aX6RzL#n8+!nc)+|XNF#e
z=?onVhZ*iMd}Nr%u!`X;!xx5c42u}nGMr>M&ajRli6NO`0z(SJX@*pW6AY&q&NG~0
zILmO3;Uz;F!)1mG3>O*JGo&+oV3^2ojo}KzRfY_P*9>nNIvKJUG8wWNav4C8yMQ5|
zp^%|~p@^ZFp_HM7p^TxN;T1y#Llr|MLp4J!Lk+_YhJ_6E40Q|*42=wL7~V1LVz|z*
zfng)V4Tif6HyJiDY-ZTTu$|!u!xo0E3`ZH-8TK*kXV}BY!pO?V#>mdd!N|#8nwM!{
z;AjA)U3k*+ON)w23sQ?R^NUzqON;VBGDb#*rXbqYz|9RzyScf5Xg31`191C+160d0
fF#i9~zzj|W3=9m%|82MrN+Ks2=jecGk3Ek7BBTR`

literal 0
HcmV?d00001

diff --git a/venv/lib/python3.7/site-packages/werkzeug/debug/tbtools.py b/venv/lib/python3.7/site-packages/werkzeug/debug/tbtools.py
new file mode 100644
index 00000000..75b4cd2c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/debug/tbtools.py
@@ -0,0 +1,595 @@
+import codecs
+import inspect
+import os
+import re
+import sys
+import sysconfig
+import traceback
+import typing as t
+from html import escape
+from tokenize import TokenError
+from types import CodeType
+from types import TracebackType
+
+from .._internal import _to_str
+from ..filesystem import get_filesystem_encoding
+from ..utils import cached_property
+from .console import Console
+
+_coding_re = re.compile(br"coding[:=]\s*([-\w.]+)")
+_line_re = re.compile(br"^(.*?)$", re.MULTILINE)
+_funcdef_re = re.compile(r"^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)")
+
+HEADER = """\
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+  "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+  <head>
+    <title>%(title)s // Werkzeug Debugger</title>
+    <link rel="stylesheet" href="?__debugger__=yes&amp;cmd=resource&amp;f=style.css"
+        type="text/css">
+    <!-- We need to make sure this has a favicon so that the debugger does
+         not accidentally trigger a request to /favicon.ico which might
+         change the application's state. -->
+    <link rel="shortcut icon"
+        href="?__debugger__=yes&amp;cmd=resource&amp;f=console.png">
+    <script src="?__debugger__=yes&amp;cmd=resource&amp;f=debugger.js"></script>
+    <script type="text/javascript">
+      var TRACEBACK = %(traceback_id)d,
+          CONSOLE_MODE = %(console)s,
+          EVALEX = %(evalex)s,
+          EVALEX_TRUSTED = %(evalex_trusted)s,
+          SECRET = "%(secret)s";
+    </script>
+  </head>
+  <body style="background-color: #fff">
+    <div class="debugger">
+"""
+FOOTER = """\
+      <div class="footer">
+        Brought to you by <strong class="arthur">DON'T PANIC</strong>, your
+        friendly Werkzeug powered traceback interpreter.
+      </div>
+    </div>
+
+    <div class="pin-prompt">
+      <div class="inner">
+        <h3>Console Locked</h3>
+        <p>
+          The console is locked and needs to be unlocked by entering the PIN.
+          You can find the PIN printed out on the standard output of your
+          shell that runs the server.
+        <form>
+          <p>PIN:
+            <input type=text name=pin size=14>
+            <input type=submit name=btn value="Confirm Pin">
+        </form>
+      </div>
+    </div>
+  </body>
+</html>
+"""
+
+PAGE_HTML = (
+    HEADER
+    + """\
+<h1>%(exception_type)s</h1>
+<div class="detail">
+  <p class="errormsg">%(exception)s</p>
+</div>
+<h2 class="traceback">Traceback <em>(most recent call last)</em></h2>
+%(summary)s
+<div class="plain">
+    <p>
+      This is the Copy/Paste friendly version of the traceback.
+    </p>
+    <textarea cols="50" rows="10" name="code" readonly>%(plaintext)s</textarea>
+</div>
+<div class="explanation">
+  The debugger caught an exception in your WSGI application.  You can now
+  look at the traceback which led to the error.  <span class="nojavascript">
+  If you enable JavaScript you can also use additional features such as code
+  execution (if the evalex feature is enabled), automatic pasting of the
+  exceptions and much more.</span>
+</div>
+"""
+    + FOOTER
+    + """
+<!--
+
+%(plaintext_cs)s
+
+-->
+"""
+)
+
+CONSOLE_HTML = (
+    HEADER
+    + """\
+<h1>Interactive Console</h1>
+<div class="explanation">
+In this console you can execute Python expressions in the context of the
+application.  The initial namespace was created by the debugger automatically.
+</div>
+<div class="console"><div class="inner">The Console requires JavaScript.</div></div>
+"""
+    + FOOTER
+)
+
+SUMMARY_HTML = """\
+<div class="%(classes)s">
+  %(title)s
+  <ul>%(frames)s</ul>
+  %(description)s
+</div>
+"""
+
+FRAME_HTML = """\
+<div class="frame" id="frame-%(id)d">
+  <h4>File <cite class="filename">"%(filename)s"</cite>,
+      line <em class="line">%(lineno)s</em>,
+      in <code class="function">%(function_name)s</code></h4>
+  <div class="source %(library)s">%(lines)s</div>
+</div>
+"""
+
+SOURCE_LINE_HTML = """\
+<tr class="%(classes)s">
+  <td class=lineno>%(lineno)s</td>
+  <td>%(code)s</td>
+</tr>
+"""
+
+
+def render_console_html(secret: str, evalex_trusted: bool = True) -> str:
+    return CONSOLE_HTML % {
+        "evalex": "true",
+        "evalex_trusted": "true" if evalex_trusted else "false",
+        "console": "true",
+        "title": "Console",
+        "secret": secret,
+        "traceback_id": -1,
+    }
+
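+# Hedged usage sketch (editor's note, not upstream Werkzeug code): the debug
+# middleware renders the interactive console page roughly like so; the secret
+# value here is illustrative only.
+#
+#     html = render_console_html(secret="example-secret", evalex_trusted=False)
+#     # EVALEX is enabled but untrusted, so the page prompts for the PIN
+#     # before evaluating any code.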
+
+def get_current_traceback(
+    ignore_system_exceptions: bool = False,
+    show_hidden_frames: bool = False,
+    skip: int = 0,
+) -> "Traceback":
+    """Get the current exception info as `Traceback` object.  Per default
+    calling this method will reraise system exceptions such as generator exit,
+    system exit or others.  This behavior can be disabled by passing `False`
+    to the function as first parameter.
+    """
+    info = t.cast(
+        t.Tuple[t.Type[BaseException], BaseException, TracebackType], sys.exc_info()
+    )
+    exc_type, exc_value, tb = info
+
+    if ignore_system_exceptions and exc_type in {
+        SystemExit,
+        KeyboardInterrupt,
+        GeneratorExit,
+    }:
+        raise
+    for _ in range(skip):
+        if tb.tb_next is None:
+            break
+        tb = tb.tb_next
+    tb = Traceback(exc_type, exc_value, tb)
+    if not show_hidden_frames:
+        tb.filter_hidden_frames()
+    return tb
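+
+# Hedged usage sketch (editor's note, not upstream code): inside an ``except``
+# block this captures the active exception, e.g.:
+#
+#     try:
+#         1 / 0
+#     except Exception:
+#         tb = get_current_traceback()
+#         tb.log()  # writes the plaintext traceback to sys.stderr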
+
+
+class Line:
+    """Helper for the source renderer."""
+
+    __slots__ = ("lineno", "code", "in_frame", "current")
+
+    def __init__(self, lineno: int, code: str) -> None:
+        self.lineno = lineno
+        self.code = code
+        self.in_frame = False
+        self.current = False
+
+    @property
+    def classes(self) -> t.List[str]:
+        rv = ["line"]
+        if self.in_frame:
+            rv.append("in-frame")
+        if self.current:
+            rv.append("current")
+        return rv
+
+    def render(self) -> str:
+        return SOURCE_LINE_HTML % {
+            "classes": " ".join(self.classes),
+            "lineno": self.lineno,
+            "code": escape(self.code),
+        }
+
+
+class Traceback:
+    """Wraps a traceback."""
+
+    def __init__(
+        self,
+        exc_type: t.Type[BaseException],
+        exc_value: BaseException,
+        tb: TracebackType,
+    ) -> None:
+        self.exc_type = exc_type
+        self.exc_value = exc_value
+        self.tb = tb
+
+        exception_type = exc_type.__name__
+        if exc_type.__module__ not in {"builtins", "__builtin__", "exceptions"}:
+            exception_type = f"{exc_type.__module__}.{exception_type}"
+        self.exception_type = exception_type
+
+        self.groups = []
+        memo = set()
+        while True:
+            self.groups.append(Group(exc_type, exc_value, tb))
+            memo.add(id(exc_value))
+            exc_value = exc_value.__cause__ or exc_value.__context__  # type: ignore
+            if exc_value is None or id(exc_value) in memo:
+                break
+            exc_type = type(exc_value)
+            tb = exc_value.__traceback__  # type: ignore
+        self.groups.reverse()
+        self.frames = [frame for group in self.groups for frame in group.frames]
+
+    def filter_hidden_frames(self) -> None:
+        """Remove the frames according to the paste spec."""
+        for group in self.groups:
+            group.filter_hidden_frames()
+
+        self.frames[:] = [frame for group in self.groups for frame in group.frames]
+
+    @property
+    def is_syntax_error(self) -> bool:
+        """Is it a syntax error?"""
+        return isinstance(self.exc_value, SyntaxError)
+
+    @property
+    def exception(self) -> str:
+        """String representation of the final exception."""
+        return self.groups[-1].exception
+
+    def log(self, logfile: t.Optional[t.TextIO] = None) -> None:
+        """Log the ASCII traceback into a file object."""
+        if logfile is None:
+            logfile = sys.stderr
+        tb = f"{self.plaintext.rstrip()}\n"
+        logfile.write(tb)
+
+    def render_summary(self, include_title: bool = True) -> str:
+        """Render the traceback for the interactive console."""
+        title = ""
+        classes = ["traceback"]
+        if not self.frames:
+            classes.append("noframe-traceback")
+            frames = []
+        else:
+            library_frames = sum(frame.is_library for frame in self.frames)
+            mark_lib = 0 < library_frames < len(self.frames)
+            frames = [group.render(mark_lib=mark_lib) for group in self.groups]
+
+        if include_title:
+            if self.is_syntax_error:
+                title = "Syntax Error"
+            else:
+                title = "Traceback <em>(most recent call last)</em>:"
+
+        if self.is_syntax_error:
+            description = f"<pre class=syntaxerror>{escape(self.exception)}</pre>"
+        else:
+            description = f"<blockquote>{escape(self.exception)}</blockquote>"
+
+        return SUMMARY_HTML % {
+            "classes": " ".join(classes),
+            "title": f"<h3>{title if title else ''}</h3>",
+            "frames": "\n".join(frames),
+            "description": description,
+        }
+
+    def render_full(
+        self,
+        evalex: bool = False,
+        secret: t.Optional[str] = None,
+        evalex_trusted: bool = True,
+    ) -> str:
+        """Render the Full HTML page with the traceback info."""
+        exc = escape(self.exception)
+        return PAGE_HTML % {
+            "evalex": "true" if evalex else "false",
+            "evalex_trusted": "true" if evalex_trusted else "false",
+            "console": "false",
+            "title": exc,
+            "exception": exc,
+            "exception_type": escape(self.exception_type),
+            "summary": self.render_summary(include_title=False),
+            "plaintext": escape(self.plaintext),
+            "plaintext_cs": re.sub("-{2,}", "-", self.plaintext),
+            "traceback_id": self.id,
+            "secret": secret,
+        }
+
+    @cached_property
+    def plaintext(self) -> str:
+        return "\n".join([group.render_text() for group in self.groups])
+
+    @property
+    def id(self) -> int:
+        return id(self)
+
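+# Hedged sketch of how the pieces fit (editor's note, not upstream code):
+# given a captured ``tb = get_current_traceback()``, the debugger emits the
+# full error page with something like::
+#
+#     page = tb.render_full(evalex=True, secret="example-secret")
+#     # ``page`` interpolates PAGE_HTML with the rendered frame summaries.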
+
+class Group:
+    """A group of frames for an exception in a traceback. If the
+    exception has a ``__cause__`` or ``__context__``, there are multiple
+    exception groups.
+    """
+
+    def __init__(
+        self,
+        exc_type: t.Type[BaseException],
+        exc_value: BaseException,
+        tb: TracebackType,
+    ) -> None:
+        self.exc_type = exc_type
+        self.exc_value = exc_value
+        self.info = None
+        if exc_value.__cause__ is not None:
+            self.info = (
+                "The above exception was the direct cause of the following exception"
+            )
+        elif exc_value.__context__ is not None:
+            self.info = (
+                "During handling of the above exception, another exception occurred"
+            )
+
+        self.frames = []
+        while tb is not None:
+            self.frames.append(Frame(exc_type, exc_value, tb))
+            tb = tb.tb_next  # type: ignore
+
+    def filter_hidden_frames(self) -> None:
+        new_frames: t.List[Frame] = []
+        hidden = False
+
+        for frame in self.frames:
+            hide = frame.hide
+            if hide in ("before", "before_and_this"):
+                new_frames = []
+                hidden = False
+                if hide == "before_and_this":
+                    continue
+            elif hide in ("reset", "reset_and_this"):
+                hidden = False
+                if hide == "reset_and_this":
+                    continue
+            elif hide in ("after", "after_and_this"):
+                hidden = True
+                if hide == "after_and_this":
+                    continue
+            elif hide or hidden:
+                continue
+            new_frames.append(frame)
+
+        # if we only have one frame and that frame is from the codeop
+        # module, remove it.
+        if len(new_frames) == 1 and self.frames[0].module == "codeop":
+            del self.frames[:]
+
+        # if the last frame is missing, something went terribly wrong :(
+        elif self.frames[-1] in new_frames:
+            self.frames[:] = new_frames
+
+    @property
+    def exception(self) -> str:
+        """String representation of the exception."""
+        buf = traceback.format_exception_only(self.exc_type, self.exc_value)
+        rv = "".join(buf).strip()
+        return _to_str(rv, "utf-8", "replace")
+
+    def render(self, mark_lib: bool = True) -> str:
+        out = []
+        if self.info is not None:
+            out.append(f'<li><div class="exc-divider">{self.info}:</div>')
+        for frame in self.frames:
+            title = f' title="{escape(frame.info)}"' if frame.info else ""
+            out.append(f"<li{title}>{frame.render(mark_lib=mark_lib)}")
+        return "\n".join(out)
+
+    def render_text(self) -> str:
+        out = []
+        if self.info is not None:
+            out.append(f"\n{self.info}:\n")
+        out.append("Traceback (most recent call last):")
+        for frame in self.frames:
+            out.append(frame.render_text())
+        out.append(self.exception)
+        return "\n".join(out)
+
+
+class Frame:
+    """A single frame in a traceback."""
+
+    def __init__(
+        self,
+        exc_type: t.Type[BaseException],
+        exc_value: BaseException,
+        tb: TracebackType,
+    ) -> None:
+        self.lineno = tb.tb_lineno
+        self.function_name = tb.tb_frame.f_code.co_name
+        self.locals = tb.tb_frame.f_locals
+        self.globals = tb.tb_frame.f_globals
+
+        fn = inspect.getsourcefile(tb) or inspect.getfile(tb)
+        if fn[-4:] in (".pyo", ".pyc"):
+            fn = fn[:-1]
+        # if it's a file on the file system, resolve the real filename.
+        if os.path.isfile(fn):
+            fn = os.path.realpath(fn)
+        self.filename = _to_str(fn, get_filesystem_encoding())
+        self.module = self.globals.get("__name__", self.locals.get("__name__"))
+        self.loader = self.globals.get("__loader__", self.locals.get("__loader__"))
+        self.code = tb.tb_frame.f_code
+
+        # support for paste's traceback extensions
+        self.hide = self.locals.get("__traceback_hide__", False)
+        info = self.locals.get("__traceback_info__")
+        if info is not None:
+            info = _to_str(info, "utf-8", "replace")
+        self.info = info
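+
+    # Hedged illustration (editor's note, not upstream code): the paste-spec
+    # marker read above lets application code control frame visibility, e.g.::
+    #
+    #     def internal_helper():
+    #         __traceback_hide__ = True  # this frame is filtered from display
+    #         ...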
+
+    def render(self, mark_lib: bool = True) -> str:
+        """Render a single frame in a traceback."""
+        return FRAME_HTML % {
+            "id": self.id,
+            "filename": escape(self.filename),
+            "lineno": self.lineno,
+            "function_name": escape(self.function_name),
+            "lines": self.render_line_context(),
+            "library": "library" if mark_lib and self.is_library else "",
+        }
+
+    @cached_property
+    def is_library(self) -> bool:
+        return any(
+            self.filename.startswith(os.path.realpath(path))
+            for path in sysconfig.get_paths().values()
+        )
+
+    def render_text(self) -> str:
+        return (
+            f'  File "{self.filename}", line {self.lineno}, in {self.function_name}\n'
+            f"    {self.current_line.strip()}"
+        )
+
+    def render_line_context(self) -> str:
+        before, current, after = self.get_context_lines()
+        rv = []
+
+        def render_line(line: str, cls: str) -> None:
+            line = line.expandtabs().rstrip()
+            stripped_line = line.strip()
+            prefix = len(line) - len(stripped_line)
+            rv.append(
+                f'<pre class="line {cls}"><span class="ws">{" " * prefix}</span>'
+                f"{escape(stripped_line) if stripped_line else ' '}</pre>"
+            )
+
+        for line in before:
+            render_line(line, "before")
+        render_line(current, "current")
+        for line in after:
+            render_line(line, "after")
+
+        return "\n".join(rv)
+
+    def get_annotated_lines(self) -> t.List[Line]:
+        """Helper function that returns lines with extra information."""
+        lines = [Line(idx + 1, x) for idx, x in enumerate(self.sourcelines)]
+
+        # find function definition and mark lines
+        if hasattr(self.code, "co_firstlineno"):
+            lineno = self.code.co_firstlineno - 1
+            while lineno > 0:
+                if _funcdef_re.match(lines[lineno].code):
+                    break
+                lineno -= 1
+            try:
+                offset = len(inspect.getblock([f"{x.code}\n" for x in lines[lineno:]]))
+            except TokenError:
+                offset = 0
+            for line in lines[lineno : lineno + offset]:
+                line.in_frame = True
+
+        # mark current line
+        try:
+            lines[self.lineno - 1].current = True
+        except IndexError:
+            pass
+
+        return lines
+
+    def eval(self, code: t.Union[str, CodeType], mode: str = "single") -> t.Any:
+        """Evaluate code in the context of the frame."""
+        if isinstance(code, str):
+            code = compile(code, "<interactive>", mode)
+        return eval(code, self.globals, self.locals)
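+
+    # Hedged usage sketch (editor's note, not upstream code): the interactive
+    # console evaluates user input in the frame's own scope, roughly::
+    #
+    #     frame.eval("locals().keys()")  # names visible at the crash site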
+
+    @cached_property
+    def sourcelines(self) -> t.List[str]:
+        """The sourcecode of the file as list of strings."""
+        # get sourcecode from loader or file
+        source = None
+        if self.loader is not None:
+            try:
+                if hasattr(self.loader, "get_source"):
+                    source = self.loader.get_source(self.module)
+                elif hasattr(self.loader, "get_source_by_code"):
+                    source = self.loader.get_source_by_code(self.code)
+            except Exception:
+                # we swallow the exception so that we don't cause trouble
+                # if the loader is broken.
+                pass
+
+        if source is None:
+            try:
+                with open(self.filename, mode="rb") as f:
+                    source = f.read()
+            except OSError:
+                return []
+
+        # already str?  return right away
+        if isinstance(source, str):
+            return source.splitlines()
+
+        charset = "utf-8"
+        if source.startswith(codecs.BOM_UTF8):
+            source = source[3:]
+        else:
+            for idx, match in enumerate(_line_re.finditer(source)):
+                coding_match = _coding_re.search(match.group())
+                if coding_match is not None:
+                    charset = coding_match.group(1).decode("utf-8")
+                    break
+                if idx > 1:
+                    break
+
+        # on broken cookies we fall back to utf-8 too
+        charset = _to_str(charset)
+        try:
+            codecs.lookup(charset)
+        except LookupError:
+            charset = "utf-8"
+
+        return source.decode(charset, "replace").splitlines()
+
+    def get_context_lines(
+        self, context: int = 5
+    ) -> t.Tuple[t.List[str], str, t.List[str]]:
+        before = self.sourcelines[self.lineno - context - 1 : self.lineno - 1]
+        past = self.sourcelines[self.lineno : self.lineno + context]
+        return (before, self.current_line, past)
+
+    @property
+    def current_line(self) -> str:
+        try:
+            return self.sourcelines[self.lineno - 1]
+        except IndexError:
+            return ""
+
+    @cached_property
+    def console(self) -> Console:
+        return Console(self.globals, self.locals)
+
+    @property
+    def id(self) -> int:
+        return id(self)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/exceptions.py b/venv/lib/python3.7/site-packages/werkzeug/exceptions.py
new file mode 100644
index 00000000..16c3964d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/exceptions.py
@@ -0,0 +1,943 @@
+"""Implements a number of Python exceptions which can be raised from within
+a view to trigger a standard HTTP non-200 response.
+
+Usage Example
+-------------
+
+.. code-block:: python
+
+    from werkzeug.wrappers.request import Request
+    from werkzeug.exceptions import HTTPException, NotFound
+
+    def view(request):
+        raise NotFound()
+
+    @Request.application
+    def application(request):
+        try:
+            return view(request)
+        except HTTPException as e:
+            return e
+
+As you can see from this example, those exceptions are callable WSGI
+applications. However, they are not Werkzeug response objects. You
+can get a response object by calling ``get_response()`` on an HTTP
+exception.
+
+Keep in mind that you may have to pass an environ (WSGI) or scope
+(ASGI) to ``get_response()`` because some errors fetch additional
+information relating to the request.
+
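+For example (a minimal sketch; ``NotFound`` stands in for any subclass):
+
+.. code-block:: python
+
+    from werkzeug.exceptions import NotFound
+
+    response = NotFound().get_response()
+    assert response.status_code == 404
+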
+If you want to hook in a different exception page for, say, a 404 status
+code, you can add a second ``except`` clause for a specific subclass of
+an error:
+
+.. code-block:: python
+
+    @Request.application
+    def application(request):
+        try:
+            return view(request)
+        except NotFound as e:
+            return not_found(request)
+        except HTTPException as e:
+            return e
+
+"""
+import sys
+import typing as t
+import warnings
+from datetime import datetime
+from html import escape
+
+from ._internal import _get_environ
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIEnvironment
+    from .datastructures import WWWAuthenticate
+    from .sansio.response import Response
+    from .wrappers.response import Response as WSGIResponse  # noqa: F401
+
+
+class HTTPException(Exception):
+    """The base class for all HTTP exceptions. This exception can be called as a WSGI
+    application to render a default error page or you can catch the subclasses
+    of it independently and render nicer error messages.
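+
+    A minimal sketch of a custom subclass (name and description are
+    illustrative)::
+
+        class PaymentRequired(HTTPException):
+            code = 402
+            description = "Payment is required to access this resource."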
+    """
+
+    code: t.Optional[int] = None
+    description: t.Optional[str] = None
+
+    def __init__(
+        self,
+        description: t.Optional[str] = None,
+        response: t.Optional["Response"] = None,
+    ) -> None:
+        super().__init__()
+        if description is not None:
+            self.description = description
+        self.response = response
+
+    @classmethod
+    def wrap(
+        cls, exception: t.Type[BaseException], name: t.Optional[str] = None
+    ) -> t.Type["HTTPException"]:
+        """Create an exception that is a subclass of the calling HTTP
+        exception and the ``exception`` argument.
+
+        The first argument to the class will be passed to the
+        wrapped ``exception``, the rest to the HTTP exception. If
+        ``e.args`` is not empty and ``e.show_exception`` is ``True``,
+        the wrapped exception message is added to the HTTP error
+        description.
+
+        .. deprecated:: 2.0
+            Will be removed in Werkzeug 2.1. Create a subclass manually
+            instead.
+
+        .. versionchanged:: 0.15.5
+            The ``show_exception`` attribute controls whether the
+            description includes the wrapped exception message.
+
+        .. versionchanged:: 0.15.0
+            The description includes the wrapped exception message.
+        """
+        warnings.warn(
+            "'HTTPException.wrap' is deprecated and will be removed in"
+            " Werkzeug 2.1. Create a subclass manually instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        class newcls(cls, exception):  # type: ignore
+            _description = cls.description
+            show_exception = False
+
+            def __init__(
+                self, arg: t.Optional[t.Any] = None, *args: t.Any, **kwargs: t.Any
+            ) -> None:
+                super().__init__(*args, **kwargs)
+
+                if arg is None:
+                    exception.__init__(self)
+                else:
+                    exception.__init__(self, arg)
+
+            @property
+            def description(self) -> str:
+                if self.show_exception:
+                    return (
+                        f"{self._description}\n"
+                        f"{exception.__name__}: {exception.__str__(self)}"
+                    )
+
+                return self._description  # type: ignore
+
+            @description.setter
+            def description(self, value: str) -> None:
+                self._description = value
+
+        newcls.__module__ = sys._getframe(1).f_globals["__name__"]
+        name = name or cls.__name__ + exception.__name__
+        newcls.__name__ = newcls.__qualname__ = name
+        return newcls
+
+    @property
+    def name(self) -> str:
+        """The status name."""
+        from .http import HTTP_STATUS_CODES
+
+        return HTTP_STATUS_CODES.get(self.code, "Unknown Error")  # type: ignore
+
+    def get_description(
+        self,
+        environ: t.Optional["WSGIEnvironment"] = None,
+        scope: t.Optional[dict] = None,
+    ) -> str:
+        """Get the description."""
+        if self.description is None:
+            description = ""
+        elif not isinstance(self.description, str):
+            description = str(self.description)
+        else:
+            description = self.description
+
+        description = escape(description).replace("\n", "<br>")
+        return f"<p>{description}</p>"
+
+    def get_body(
+        self,
+        environ: t.Optional["WSGIEnvironment"] = None,
+        scope: t.Optional[dict] = None,
+    ) -> str:
+        """Get the HTML body."""
+        return (
+            '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
+            f"<title>{self.code} {escape(self.name)}</title>\n"
+            f"<h1>{escape(self.name)}</h1>\n"
+            f"{self.get_description(environ)}\n"
+        )
+
+    def get_headers(
+        self,
+        environ: t.Optional["WSGIEnvironment"] = None,
+        scope: t.Optional[dict] = None,
+    ) -> t.List[t.Tuple[str, str]]:
+        """Get a list of headers."""
+        return [("Content-Type", "text/html; charset=utf-8")]
+
+    def get_response(
+        self,
+        environ: t.Optional["WSGIEnvironment"] = None,
+        scope: t.Optional[dict] = None,
+    ) -> "Response":
+        """Get a response object.  If one was passed to the exception
+        it's returned directly.
+
+        :param environ: the optional environ for the request.  This
+                        can be used to modify the response depending
+                        on what the request looked like.
+        :return: a :class:`Response` object or a subclass thereof.
+        """
+        from .wrappers.response import Response as WSGIResponse  # noqa: F811
+
+        if self.response is not None:
+            return self.response
+        if environ is not None:
+            environ = _get_environ(environ)
+        headers = self.get_headers(environ, scope)
+        return WSGIResponse(self.get_body(environ, scope), self.code, headers)
+
+    def __call__(
+        self, environ: "WSGIEnvironment", start_response: "StartResponse"
+    ) -> t.Iterable[bytes]:
+        """Call the exception as WSGI application.
+
+        :param environ: the WSGI environment.
+        :param start_response: the response callable provided by the WSGI
+                               server.
+        """
+        response = t.cast("WSGIResponse", self.get_response(environ))
+        return response(environ, start_response)
+
+    def __str__(self) -> str:
+        code = self.code if self.code is not None else "???"
+        return f"{code} {self.name}: {self.description}"
+
+    def __repr__(self) -> str:
+        code = self.code if self.code is not None else "???"
+        return f"<{type(self).__name__} '{code}: {self.name}'>"
+
+
+class BadRequest(HTTPException):
+    """*400* `Bad Request`
+
+    Raise if the browser sends something to the application the application
+    or server cannot handle.
+    """
+
+    code = 400
+    description = (
+        "The browser (or proxy) sent a request that this server could "
+        "not understand."
+    )
+
+
+class BadRequestKeyError(BadRequest, KeyError):
+    """An exception that is used to signal both a :exc:`KeyError` and a
+    :exc:`BadRequest`. Used by many of the datastructures.
+    """
+
+    _description = BadRequest.description
+    #: Show the KeyError along with the HTTP error message in the
+    #: response. This should be disabled in production, but can be
+    #: useful in a debug mode.
+    show_exception = False
+
+    def __init__(self, arg: t.Optional[str] = None, *args: t.Any, **kwargs: t.Any):
+        super().__init__(*args, **kwargs)
+
+        if arg is None:
+            KeyError.__init__(self)
+        else:
+            KeyError.__init__(self, arg)
+
+    @property  # type: ignore
+    def description(self) -> str:  # type: ignore
+        if self.show_exception:
+            return (
+                f"{self._description}\n"
+                f"{KeyError.__name__}: {KeyError.__str__(self)}"
+            )
+
+        return self._description
+
+    @description.setter
+    def description(self, value: str) -> None:
+        self._description = value
+
+
+class ClientDisconnected(BadRequest):
+    """Internal exception that is raised if Werkzeug detects a disconnected
+    client.  Since the client is already gone at that point attempting to
+    send the error message to the client might not work and might ultimately
+    result in another exception in the server.  Mainly this is here so that
+    it is silenced by default as far as Werkzeug is concerned.
+
+    Since disconnections cannot be reliably detected and are unspecified
+    by WSGI to a large extent this might or might not be raised if a client
+    is gone.
+
+    .. versionadded:: 0.8
+    """
+
+
+class SecurityError(BadRequest):
+    """Raised if something triggers a security error.  This is otherwise
+    exactly like a bad request error.
+
+    .. versionadded:: 0.9
+    """
+
+
+class BadHost(BadRequest):
+    """Raised if the submitted host is badly formatted.
+
+    .. versionadded:: 0.11.2
+    """
+
+
+class Unauthorized(HTTPException):
+    """*401* ``Unauthorized``
+
+    Raise if the user is not authorized to access a resource.
+
+    The ``www_authenticate`` argument should be used to set the
+    ``WWW-Authenticate`` header. This is used for HTTP basic auth and
+    other schemes. Use :class:`~werkzeug.datastructures.WWWAuthenticate`
+    to create correctly formatted values. Strictly speaking a 401
+    response is invalid if it doesn't provide at least one value for
+    this header, although real clients typically don't care.
+
+    :param description: Override the default message used for the body
+        of the response.
+    :param www_authenticate: A single value, or list of values, for the
+        WWW-Authenticate header(s).
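+
+    A minimal sketch using HTTP basic auth (the realm string is
+    illustrative)::
+
+        from werkzeug.datastructures import WWWAuthenticate
+
+        auth = WWWAuthenticate()
+        auth.set_basic(realm="example realm")
+        raise Unauthorized(www_authenticate=auth)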
+
+    .. versionchanged:: 2.0
+        Serialize multiple ``www_authenticate`` items into multiple
+        ``WWW-Authenticate`` headers, rather than joining them
+        into a single value, for better interoperability.
+
+    .. versionchanged:: 0.15.3
+        If the ``www_authenticate`` argument is not set, the
+        ``WWW-Authenticate`` header is not set.
+
+    .. versionchanged:: 0.15.3
+        The ``response`` argument was restored.
+
+    .. versionchanged:: 0.15.1
+        ``description`` was moved back as the first argument, restoring
+        its previous position.
+
+    .. versionchanged:: 0.15.0
+        ``www_authenticate`` was added as the first argument, ahead of
+        ``description``.
+    """
+
+    code = 401
+    description = (
+        "The server could not verify that you are authorized to access"
+        " the URL requested. You either supplied the wrong credentials"
+        " (e.g. a bad password), or your browser doesn't understand"
+        " how to supply the credentials required."
+    )
+
+    def __init__(
+        self,
+        description: t.Optional[str] = None,
+        response: t.Optional["Response"] = None,
+        www_authenticate: t.Optional[
+            t.Union["WWWAuthenticate", t.Iterable["WWWAuthenticate"]]
+        ] = None,
+    ) -> None:
+        super().__init__(description, response)
+
+        from .datastructures import WWWAuthenticate
+
+        if isinstance(www_authenticate, WWWAuthenticate):
+            www_authenticate = (www_authenticate,)
+
+        self.www_authenticate = www_authenticate
+
+    def get_headers(
+        self,
+        environ: t.Optional["WSGIEnvironment"] = None,
+        scope: t.Optional[dict] = None,
+    ) -> t.List[t.Tuple[str, str]]:
+        headers = super().get_headers(environ, scope)
+        if self.www_authenticate:
+            headers.extend(("WWW-Authenticate", str(x)) for x in self.www_authenticate)
+        return headers
+
+
+class Forbidden(HTTPException):
+    """*403* `Forbidden`
+
+    Raise if the user doesn't have the permission for the requested resource
+    but was authenticated.
+    """
+
+    code = 403
+    description = (
+        "You don't have the permission to access the requested"
+        " resource. It is either read-protected or not readable by the"
+        " server."
+    )
+
+
+class NotFound(HTTPException):
+    """*404* `Not Found`
+
+    Raise if a resource does not exist and never existed.
+    """
+
+    code = 404
+    description = (
+        "The requested URL was not found on the server. If you entered"
+        " the URL manually please check your spelling and try again."
+    )
+
+
+class MethodNotAllowed(HTTPException):
+    """*405* `Method Not Allowed`
+
+    Raise if the server used a method the resource does not handle.  For
+    example `POST` if the resource is view only.  Especially useful for REST.
+
+    The first argument for this exception should be a list of allowed methods.
+    Strictly speaking the response would be invalid if you don't provide valid
+    methods in the ``Allow`` header, which you can do with that list.
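+
+    A minimal sketch::
+
+        raise MethodNotAllowed(valid_methods=["GET", "HEAD"])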
+    """
+
+    code = 405
+    description = "The method is not allowed for the requested URL."
+
+    def __init__(
+        self,
+        valid_methods: t.Optional[t.Iterable[str]] = None,
+        description: t.Optional[str] = None,
+        response: t.Optional["Response"] = None,
+    ) -> None:
+        """Takes an optional list of valid http methods
+        starting with werkzeug 0.3 the list will be mandatory."""
+        super().__init__(description=description, response=response)
+        self.valid_methods = valid_methods
+
+    def get_headers(
+        self,
+        environ: t.Optional["WSGIEnvironment"] = None,
+        scope: t.Optional[dict] = None,
+    ) -> t.List[t.Tuple[str, str]]:
+        headers = super().get_headers(environ, scope)
+        if self.valid_methods:
+            headers.append(("Allow", ", ".join(self.valid_methods)))
+        return headers
+
+
+class NotAcceptable(HTTPException):
+    """*406* `Not Acceptable`
+
+    Raise if the server can't return any content conforming to the
+    `Accept` headers of the client.
+    """
+
+    code = 406
+    description = (
+        "The resource identified by the request is only capable of"
+        " generating response entities which have content"
+        " characteristics not acceptable according to the accept"
+        " headers sent in the request."
+    )
+
+
+class RequestTimeout(HTTPException):
+    """*408* `Request Timeout`
+
+    Raise to signalize a timeout.
+    """
+
+    code = 408
+    description = (
+        "The server closed the network connection because the browser"
+        " didn't finish the request within the specified time."
+    )
+
+
+class Conflict(HTTPException):
+    """*409* `Conflict`
+
+    Raise to signal that a request cannot be completed because it conflicts
+    with the current state on the server.
+
+    .. versionadded:: 0.7
+    """
+
+    code = 409
+    description = (
+        "A conflict happened while processing the request. The"
+        " resource might have been modified while the request was being"
+        " processed."
+    )
+
+
+class Gone(HTTPException):
+    """*410* `Gone`
+
+    Raise if a resource existed previously and went away without new location.
+    """
+
+    code = 410
+    description = (
+        "The requested URL is no longer available on this server and"
+        " there is no forwarding address. If you followed a link from a"
+        " foreign page, please contact the author of this page."
+    )
+
+
+class LengthRequired(HTTPException):
+    """*411* `Length Required`
+
+    Raise if the browser submitted data but no ``Content-Length`` header which
+    is required for the kind of processing the server does.
+    """
+
+    code = 411
+    description = (
+        "A request with this method requires a valid <code>Content-"
+        "Length</code> header."
+    )
+
+
+class PreconditionFailed(HTTPException):
+    """*412* `Precondition Failed`
+
+    Status code used in combination with ``If-Match``, ``If-None-Match``, or
+    ``If-Unmodified-Since``.
+    """
+
+    code = 412
+    description = (
+        "The precondition on the request for the URL failed positive evaluation."
+    )
+
+
+class RequestEntityTooLarge(HTTPException):
+    """*413* `Request Entity Too Large`
+
+    The status code one should return if the data submitted exceeded a given
+    limit.
+    """
+
+    code = 413
+    description = "The data value transmitted exceeds the capacity limit."
+
+
+class RequestURITooLarge(HTTPException):
+    """*414* `Request URI Too Large`
+
+    Like *413* but for too long URLs.
+    """
+
+    code = 414
+    description = (
+        "The length of the requested URL exceeds the capacity limit for"
+        " this server. The request cannot be processed."
+    )
+
+
+class UnsupportedMediaType(HTTPException):
+    """*415* `Unsupported Media Type`
+
+    The status code returned if the server is unable to handle the media type
+    the client transmitted.
+    """
+
+    code = 415
+    description = (
+        "The server does not support the media type transmitted in the request."
+    )
+
+
+class RequestedRangeNotSatisfiable(HTTPException):
+    """*416* `Requested Range Not Satisfiable`
+
+    The client asked for an invalid part of the file.
+
+    .. versionadded:: 0.7
+    """
+
+    code = 416
+    description = "The server cannot provide the requested range."
+
+    def __init__(
+        self,
+        length: t.Optional[int] = None,
+        units: str = "bytes",
+        description: t.Optional[str] = None,
+        response: t.Optional["Response"] = None,
+    ) -> None:
+        """Takes an optional `Content-Range` header value based on ``length``
+        parameter.
+        """
+        super().__init__(description=description, response=response)
+        self.length = length
+        self.units = units
+
+    def get_headers(
+        self,
+        environ: t.Optional["WSGIEnvironment"] = None,
+        scope: t.Optional[dict] = None,
+    ) -> t.List[t.Tuple[str, str]]:
+        headers = super().get_headers(environ, scope)
+        if self.length is not None:
+            headers.append(("Content-Range", f"{self.units} */{self.length}"))
+        return headers
+
+
+class ExpectationFailed(HTTPException):
+    """*417* `Expectation Failed`
+
+    The server cannot meet the requirements of the Expect request-header.
+
+    .. versionadded:: 0.7
+    """
+
+    code = 417
+    description = "The server could not meet the requirements of the Expect header"
+
+
+class ImATeapot(HTTPException):
+    """*418* `I'm a teapot`
+
+    The server should return this if it is a teapot and someone attempted
+    to brew coffee with it.
+
+    .. versionadded:: 0.7
+    """
+
+    code = 418
+    description = "This server is a teapot, not a coffee machine"
+
+
+class UnprocessableEntity(HTTPException):
+    """*422* `Unprocessable Entity`
+
+    Used if the request is well formed, but the instructions are otherwise
+    incorrect.
+    """
+
+    code = 422
+    description = (
+        "The request was well-formed but was unable to be followed due"
+        " to semantic errors."
+    )
+
+
+class Locked(HTTPException):
+    """*423* `Locked`
+
+    Used if the resource that is being accessed is locked.
+    """
+
+    code = 423
+    description = "The resource that is being accessed is locked."
+
+
+class FailedDependency(HTTPException):
+    """*424* `Failed Dependency`
+
+    Used if the method could not be performed on the resource
+    because the requested action depended on another action and that action failed.
+    """
+
+    code = 424
+    description = (
+        "The method could not be performed on the resource because the"
+        " requested action depended on another action and that action"
+        " failed."
+    )
+
+
+class PreconditionRequired(HTTPException):
+    """*428* `Precondition Required`
+
+    The server requires this request to be conditional, typically to prevent
+    the lost update problem, which is a race condition between two or more
+    clients attempting to update a resource through PUT or DELETE. By
+    requiring each client to include a conditional header ("If-Match" or
+    "If-Unmodified-Since") with the proper value retained from a recent
+    GET request, the server ensures that each client has at least seen
+    the previous revision of the resource.
+    """
+
+    code = 428
+    description = (
+        "This request is required to be conditional; try using"
+        ' "If-Match" or "If-Unmodified-Since".'
+    )
+
+
+class _RetryAfter(HTTPException):
+    """Adds an optional ``retry_after`` parameter which will set the
+    ``Retry-After`` header. May be an :class:`int` number of seconds or
+    a :class:`~datetime.datetime`.
+    """
+
+    def __init__(
+        self,
+        description: t.Optional[str] = None,
+        response: t.Optional["Response"] = None,
+        retry_after: t.Optional[t.Union[datetime, int]] = None,
+    ) -> None:
+        super().__init__(description, response)
+        self.retry_after = retry_after
+
+    def get_headers(
+        self,
+        environ: t.Optional["WSGIEnvironment"] = None,
+        scope: t.Optional[dict] = None,
+    ) -> t.List[t.Tuple[str, str]]:
+        headers = super().get_headers(environ, scope)
+
+        if self.retry_after:
+            if isinstance(self.retry_after, datetime):
+                from .http import http_date
+
+                value = http_date(self.retry_after)
+            else:
+                value = str(self.retry_after)
+
+            headers.append(("Retry-After", value))
+
+        return headers
+
+
+class TooManyRequests(_RetryAfter):
+    """*429* `Too Many Requests`
+
+    The server is limiting the rate at which this user receives
+    responses, and this request exceeds that rate. (The server may use
+    any convenient method to identify users and their request rates).
+    The server may include a "Retry-After" header to indicate how long
+    the user should wait before retrying.
+
+    :param retry_after: If given, set the ``Retry-After`` header to this
+        value. May be an :class:`int` number of seconds or a
+        :class:`~datetime.datetime`.
+
+    .. versionchanged:: 1.0
+        Added ``retry_after`` parameter.
+    """
+
+    code = 429
+    description = "This user has exceeded an allotted request count. Try again later."
+
+
+class RequestHeaderFieldsTooLarge(HTTPException):
+    """*431* `Request Header Fields Too Large`
+
+    The server refuses to process the request because the header fields are too
+    large. One or more individual fields may be too large, or the set of all
+    headers is too large.
+    """
+
+    code = 431
+    description = "One or more header fields exceeds the maximum size."
+
+
+class UnavailableForLegalReasons(HTTPException):
+    """*451* `Unavailable For Legal Reasons`
+
+    This status code indicates that the server is denying access to the
+    resource as a consequence of a legal demand.
+    """
+
+    code = 451
+    description = "Unavailable for legal reasons."
+
+
+class InternalServerError(HTTPException):
+    """*500* `Internal Server Error`
+
+    Raise if an internal server error occurred.  This is a good fallback if an
+    unknown error occurred in the dispatcher.
+
+    .. versionchanged:: 1.0.0
+        Added the :attr:`original_exception` attribute.
+    """
+
+    code = 500
+    description = (
+        "The server encountered an internal error and was unable to"
+        " complete your request. Either the server is overloaded or"
+        " there is an error in the application."
+    )
+
+    def __init__(
+        self,
+        description: t.Optional[str] = None,
+        response: t.Optional["Response"] = None,
+        original_exception: t.Optional[BaseException] = None,
+    ) -> None:
+        #: The original exception that caused this 500 error. Can be
+        #: used by frameworks to provide context when handling
+        #: unexpected errors.
+        self.original_exception = original_exception
+        super().__init__(description=description, response=response)
+
+
+class NotImplemented(HTTPException):
+    """*501* `Not Implemented`
+
+    Raise if the application does not support the action requested by the
+    browser.
+    """
+
+    code = 501
+    description = "The server does not support the action requested by the browser."
+
+
+class BadGateway(HTTPException):
+    """*502* `Bad Gateway`
+
+    If you do proxying in your application you should return this status code
+    if you received an invalid response from the upstream server it accessed
+    in attempting to fulfill the request.
+    """
+
+    code = 502
+    description = (
+        "The proxy server received an invalid response from an upstream server."
+    )
+
+
+class ServiceUnavailable(_RetryAfter):
+    """*503* `Service Unavailable`
+
+    Status code you should return if a service is temporarily
+    unavailable.
+
+    :param retry_after: If given, set the ``Retry-After`` header to this
+        value. May be an :class:`int` number of seconds or a
+        :class:`~datetime.datetime`.
+
+    .. versionchanged:: 1.0
+        Added ``retry_after`` parameter.
+    """
+
+    code = 503
+    description = (
+        "The server is temporarily unable to service your request due"
+        " to maintenance downtime or capacity problems. Please try"
+        " again later."
+    )
+
+
+class GatewayTimeout(HTTPException):
+    """*504* `Gateway Timeout`
+
+    Status code you should return if a connection to an upstream server
+    times out.
+    """
+
+    code = 504
+    description = "The connection to an upstream server timed out."
+
+
+class HTTPVersionNotSupported(HTTPException):
+    """*505* `HTTP Version Not Supported`
+
+    The server does not support the HTTP protocol version used in the request.
+    """
+
+    code = 505
+    description = (
+        "The server does not support the HTTP protocol version used in the request."
+    )
+
+
+default_exceptions: t.Dict[int, t.Type[HTTPException]] = {}
+
+
+def _find_exceptions() -> None:
+    for obj in globals().values():
+        try:
+            is_http_exception = issubclass(obj, HTTPException)
+        except TypeError:
+            is_http_exception = False
+        if not is_http_exception or obj.code is None:
+            continue
+        old_obj = default_exceptions.get(obj.code, None)
+        if old_obj is not None and issubclass(obj, old_obj):
+            continue
+        default_exceptions[obj.code] = obj
+
+
+_find_exceptions()
+del _find_exceptions
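+
+# ``default_exceptions`` now maps each error code to an exception class
+# (subclasses of an already-registered class are skipped), e.g.
+# 404 -> NotFound, 500 -> InternalServerError.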
+
+
+class Aborter:
+    """When passed a dict of code -> exception items it can be used as
+    callable that raises exceptions.  If the first argument to the
+    callable is an integer it will be looked up in the mapping, if it's
+    a WSGI application it will be raised in a proxy exception.
+
+    The rest of the arguments are forwarded to the exception constructor.
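+
+    A minimal sketch::
+
+        aborter = Aborter()
+        aborter(404)  # raises NotFound
+        aborter(400, "bad form data")  # raises BadRequest("bad form data")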
+    """
+
+    def __init__(
+        self,
+        mapping: t.Optional[t.Dict[int, t.Type[HTTPException]]] = None,
+        extra: t.Optional[t.Dict[int, t.Type[HTTPException]]] = None,
+    ) -> None:
+        if mapping is None:
+            mapping = default_exceptions
+        self.mapping = dict(mapping)
+        if extra is not None:
+            self.mapping.update(extra)
+
+    def __call__(
+        self, code: t.Union[int, "Response"], *args: t.Any, **kwargs: t.Any
+    ) -> "te.NoReturn":
+        from .sansio.response import Response
+
+        if isinstance(code, Response):
+            raise HTTPException(response=code)
+
+        if code not in self.mapping:
+            raise LookupError(f"no exception for {code!r}")
+
+        raise self.mapping[code](*args, **kwargs)
+
+
+def abort(
+    status: t.Union[int, "Response"], *args: t.Any, **kwargs: t.Any
+) -> "te.NoReturn":
+    """Raises an :py:exc:`HTTPException` for the given status code or WSGI
+    application.
+
+    If a status code is given, it will be looked up in the list of
+    exceptions and will raise that exception.  If passed a WSGI application,
+    it will wrap it in a proxy WSGI exception and raise that::
+
+       abort(404)  # 404 Not Found
+       abort(Response('Hello World'))
+
+    """
+    _aborter(status, *args, **kwargs)
+
+
+_aborter: Aborter = Aborter()
diff --git a/venv/lib/python3.7/site-packages/werkzeug/filesystem.py b/venv/lib/python3.7/site-packages/werkzeug/filesystem.py
new file mode 100644
index 00000000..36a3d12e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/filesystem.py
@@ -0,0 +1,55 @@
+import codecs
+import sys
+import typing as t
+import warnings
+
+# We do not trust traditional unixes.
+has_likely_buggy_unicode_filesystem = (
+    sys.platform.startswith("linux") or "bsd" in sys.platform
+)
+
+
+def _is_ascii_encoding(encoding: t.Optional[str]) -> bool:
+    """Given an encoding this figures out if the encoding is actually ASCII (which
+    is something we don't actually want in most cases). This is necessary
+    because ASCII comes under many names such as ANSI_X3.4-1968.
+    """
+    if encoding is None:
+        return False
+    try:
+        return codecs.lookup(encoding).name == "ascii"
+    except LookupError:
+        return False
+
+
+class BrokenFilesystemWarning(RuntimeWarning, UnicodeWarning):
+    """The warning used by Werkzeug to signal a broken filesystem. Will only be
+    used once per runtime."""
+
+
+_warned_about_filesystem_encoding = False
+
+
+def get_filesystem_encoding() -> str:
+    """Returns the filesystem encoding that should be used. Note that this is
+    different from the Python understanding of the filesystem encoding which
+    might be deeply flawed. Do not use this value against Python's string APIs
+    because it might be different. See :ref:`filesystem-encoding` for the exact
+    behavior.
+
+    The concept of a filesystem encoding in general is not something you
+    should rely on. As such, if you ever need to use this function for
+    anything other than writing wrapper code, reconsider.
+    """
+    global _warned_about_filesystem_encoding
+    rv = sys.getfilesystemencoding()
+    if has_likely_buggy_unicode_filesystem and (not rv or _is_ascii_encoding(rv)):
+        if not _warned_about_filesystem_encoding:
+            warnings.warn(
+                "Detected a misconfigured UNIX filesystem: Will use"
+                f" UTF-8 as filesystem encoding instead of {rv!r}",
+                BrokenFilesystemWarning,
+            )
+            _warned_about_filesystem_encoding = True
+        return "utf-8"
+    return rv
diff --git a/venv/lib/python3.7/site-packages/werkzeug/formparser.py b/venv/lib/python3.7/site-packages/werkzeug/formparser.py
new file mode 100644
index 00000000..2dcb709f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/formparser.py
@@ -0,0 +1,495 @@
+import typing as t
+import warnings
+from functools import update_wrapper
+from io import BytesIO
+from itertools import chain
+from typing import Union
+
+from . import exceptions
+from ._internal import _to_str
+from .datastructures import FileStorage
+from .datastructures import Headers
+from .datastructures import MultiDict
+from .http import parse_options_header
+from .sansio.multipart import Data
+from .sansio.multipart import Epilogue
+from .sansio.multipart import Field
+from .sansio.multipart import File
+from .sansio.multipart import MultipartDecoder
+from .sansio.multipart import NeedData
+from .urls import url_decode_stream
+from .wsgi import _make_chunk_iter
+from .wsgi import get_content_length
+from .wsgi import get_input_stream
+
+# there are some platforms where SpooledTemporaryFile is not available.
+# In that case we need to provide a fallback.
+try:
+    from tempfile import SpooledTemporaryFile
+except ImportError:
+    from tempfile import TemporaryFile
+
+    SpooledTemporaryFile = None  # type: ignore
+
+if t.TYPE_CHECKING:
+    import typing as te
+    from _typeshed.wsgi import WSGIEnvironment
+
+    t_parse_result = t.Tuple[t.BinaryIO, MultiDict, MultiDict]
+
+    class TStreamFactory(te.Protocol):
+        def __call__(
+            self,
+            total_content_length: t.Optional[int],
+            content_type: t.Optional[str],
+            filename: t.Optional[str],
+            content_length: t.Optional[int] = None,
+        ) -> t.BinaryIO:
+            ...
+
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+
+def _exhaust(stream: t.BinaryIO) -> None:
+    bts = stream.read(64 * 1024)
+    while bts:
+        bts = stream.read(64 * 1024)
+
+
+def default_stream_factory(
+    total_content_length: t.Optional[int],
+    content_type: t.Optional[str],
+    filename: t.Optional[str],
+    content_length: t.Optional[int] = None,
+) -> t.BinaryIO:
+    max_size = 1024 * 500
+
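+    # Spool up to ~500 kB in memory; SpooledTemporaryFile rolls over to a
+    # real temporary file once this threshold is exceeded.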
+    if SpooledTemporaryFile is not None:
+        return t.cast(t.BinaryIO, SpooledTemporaryFile(max_size=max_size, mode="rb+"))
+    elif total_content_length is None or total_content_length > max_size:
+        return t.cast(t.BinaryIO, TemporaryFile("rb+"))
+
+    return BytesIO()
+
+
+def parse_form_data(
+    environ: "WSGIEnvironment",
+    stream_factory: t.Optional["TStreamFactory"] = None,
+    charset: str = "utf-8",
+    errors: str = "replace",
+    max_form_memory_size: t.Optional[int] = None,
+    max_content_length: t.Optional[int] = None,
+    cls: t.Optional[t.Type[MultiDict]] = None,
+    silent: bool = True,
+) -> "t_parse_result":
+    """Parse the form data in the environ and return it as tuple in the form
+    ``(stream, form, files)``.  You should only call this method if the
+    transport method is `POST`, `PUT`, or `PATCH`.
+
+    If the mimetype of the data transmitted is `multipart/form-data` the
+    files multidict will be filled with `FileStorage` objects.  If the
+    mimetype is unknown the input stream is wrapped and returned as first
+    argument, else the stream is empty.
+
+    This is a shortcut for the common usage of :class:`FormDataParser`.
+
+    Have a look at :doc:`/request_data` for more details.
+
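+    A minimal sketch (``environ`` is assumed to be the WSGI environ of a
+    ``POST`` request)::
+
+        stream, form, files = parse_form_data(environ)
+        name = form.get("name")      # text fields end up in ``form``
+        upload = files.get("file")   # uploads end up in ``files`` as FileStorage
+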
+    .. versionadded:: 0.5
+       The `max_form_memory_size`, `max_content_length` and
+       `cls` parameters were added.
+
+    .. versionadded:: 0.5.1
+       The optional `silent` flag was added.
+
+    :param environ: the WSGI environment to be used for parsing.
+    :param stream_factory: An optional callable that returns a new readable
+                           and writable file object.  This callable works
+                           the same as :meth:`Response._get_file_stream`.
+    :param charset: The character set for URL and url encoded form data.
+    :param errors: The encoding error behavior.
+    :param max_form_memory_size: the maximum number of bytes to be accepted for
+                           in-memory stored form data.  If the data
+                           exceeds the value specified an
+                           :exc:`~exceptions.RequestEntityTooLarge`
+                           exception is raised.
+    :param max_content_length: If this is provided and the transmitted data
+                               is longer than this value an
+                               :exc:`~exceptions.RequestEntityTooLarge`
+                               exception is raised.
+    :param cls: an optional dict class to use.  If this is not specified
+                       or `None` the default :class:`MultiDict` is used.
+    :param silent: If set to False, parsing errors will not be caught.
+    :return: A tuple in the form ``(stream, form, files)``.
+    """
+    return FormDataParser(
+        stream_factory,
+        charset,
+        errors,
+        max_form_memory_size,
+        max_content_length,
+        cls,
+        silent,
+    ).parse_from_environ(environ)
+
+
+def exhaust_stream(f: F) -> F:
+    """Helper decorator for methods that exhausts the stream on return."""
+
+    def wrapper(self, stream, *args, **kwargs):  # type: ignore
+        try:
+            return f(self, stream, *args, **kwargs)
+        finally:
+            exhaust = getattr(stream, "exhaust", None)
+
+            if exhaust is not None:
+                exhaust()
+            else:
+                while True:
+                    chunk = stream.read(1024 * 64)
+
+                    if not chunk:
+                        break
+
+    return update_wrapper(t.cast(F, wrapper), f)
+
+
+class FormDataParser:
+    """This class implements parsing of form data for Werkzeug.  By itself
+    it can parse multipart and url encoded form data.  It can be subclassed
+    and extended but for most mimetypes it is a better idea to use the
+    untouched stream and expose it as separate attributes on a request
+    object.
+
+    .. versionadded:: 0.8
+
+    :param stream_factory: An optional callable that returns a new readable
+                           and writable file object.  This callable works
+                           the same as :meth:`Response._get_file_stream`.
+    :param charset: The character set for URL and url encoded form data.
+    :param errors: The encoding error behavior.
+    :param max_form_memory_size: the maximum number of bytes to be accepted for
+                           in-memory stored form data.  If the data
+                           exceeds the value specified an
+                           :exc:`~exceptions.RequestEntityTooLarge`
+                           exception is raised.
+    :param max_content_length: If this is provided and the transmitted data
+                               is longer than this value an
+                               :exc:`~exceptions.RequestEntityTooLarge`
+                               exception is raised.
+    :param cls: an optional dict class to use.  If this is not specified
+                       or `None` the default :class:`MultiDict` is used.
+    :param silent: If set to False, parsing errors will not be caught.
+    """
+
+    def __init__(
+        self,
+        stream_factory: t.Optional["TStreamFactory"] = None,
+        charset: str = "utf-8",
+        errors: str = "replace",
+        max_form_memory_size: t.Optional[int] = None,
+        max_content_length: t.Optional[int] = None,
+        cls: t.Optional[t.Type[MultiDict]] = None,
+        silent: bool = True,
+    ) -> None:
+        if stream_factory is None:
+            stream_factory = default_stream_factory
+
+        self.stream_factory = stream_factory
+        self.charset = charset
+        self.errors = errors
+        self.max_form_memory_size = max_form_memory_size
+        self.max_content_length = max_content_length
+
+        if cls is None:
+            cls = MultiDict
+
+        self.cls = cls
+        self.silent = silent
+
+    def get_parse_func(
+        self, mimetype: str, options: t.Dict[str, str]
+    ) -> t.Optional[
+        t.Callable[
+            ["FormDataParser", t.BinaryIO, str, t.Optional[int], t.Dict[str, str]],
+            "t_parse_result",
+        ]
+    ]:
+        return self.parse_functions.get(mimetype)
+
+    def parse_from_environ(self, environ: "WSGIEnvironment") -> "t_parse_result":
+        """Parses the information from the environment as form data.
+
+        :param environ: the WSGI environment to be used for parsing.
+        :return: A tuple in the form ``(stream, form, files)``.
+        """
+        content_type = environ.get("CONTENT_TYPE", "")
+        content_length = get_content_length(environ)
+        mimetype, options = parse_options_header(content_type)
+        return self.parse(get_input_stream(environ), mimetype, content_length, options)
+
+    def parse(
+        self,
+        stream: t.BinaryIO,
+        mimetype: str,
+        content_length: t.Optional[int],
+        options: t.Optional[t.Dict[str, str]] = None,
+    ) -> "t_parse_result":
+        """Parses the information from the given stream, mimetype,
+        content length and mimetype parameters.
+
+        :param stream: an input stream
+        :param mimetype: the mimetype of the data
+        :param content_length: the content length of the incoming data
+        :param options: optional mimetype parameters (used for
+                        the multipart boundary for instance)
+        :return: A tuple in the form ``(stream, form, files)``.
+        """
+        if (
+            self.max_content_length is not None
+            and content_length is not None
+            and content_length > self.max_content_length
+        ):
+            # if the input stream is not exhausted, firefox reports Connection Reset
+            _exhaust(stream)
+            raise exceptions.RequestEntityTooLarge()
+
+        if options is None:
+            options = {}
+
+        parse_func = self.get_parse_func(mimetype, options)
+
+        if parse_func is not None:
+            try:
+                return parse_func(self, stream, mimetype, content_length, options)
+            except ValueError:
+                if not self.silent:
+                    raise
+
+        return stream, self.cls(), self.cls()
+
+    @exhaust_stream
+    def _parse_multipart(
+        self,
+        stream: t.BinaryIO,
+        mimetype: str,
+        content_length: t.Optional[int],
+        options: t.Dict[str, str],
+    ) -> "t_parse_result":
+        parser = MultiPartParser(
+            self.stream_factory,
+            self.charset,
+            self.errors,
+            max_form_memory_size=self.max_form_memory_size,
+            cls=self.cls,
+        )
+        boundary = options.get("boundary", "").encode("ascii")
+
+        if not boundary:
+            raise ValueError("Missing boundary")
+
+        form, files = parser.parse(stream, boundary, content_length)
+        return stream, form, files
+
+    @exhaust_stream
+    def _parse_urlencoded(
+        self,
+        stream: t.BinaryIO,
+        mimetype: str,
+        content_length: t.Optional[int],
+        options: t.Dict[str, str],
+    ) -> "t_parse_result":
+        if (
+            self.max_form_memory_size is not None
+            and content_length is not None
+            and content_length > self.max_form_memory_size
+        ):
+            # if the input stream is not exhausted, firefox reports Connection Reset
+            _exhaust(stream)
+            raise exceptions.RequestEntityTooLarge()
+
+        form = url_decode_stream(stream, self.charset, errors=self.errors, cls=self.cls)
+        return stream, form, self.cls()
+
+    #: mapping of mimetypes to parsing functions
+    parse_functions: t.Dict[
+        str,
+        t.Callable[
+            ["FormDataParser", t.BinaryIO, str, t.Optional[int], t.Dict[str, str]],
+            "t_parse_result",
+        ],
+    ] = {
+        "multipart/form-data": _parse_multipart,
+        "application/x-www-form-urlencoded": _parse_urlencoded,
+        "application/x-url-encoded": _parse_urlencoded,
+    }
+
+
+def _line_parse(line: str) -> t.Tuple[str, bool]:
+    """Removes line ending characters and returns a tuple (`stripped_line`,
+    `is_terminated`).
+    """
+    if line[-2:] == "\r\n":
+        return line[:-2], True
+
+    elif line[-1:] in {"\r", "\n"}:
+        return line[:-1], True
+
+    return line, False
+
+
+def parse_multipart_headers(iterable: t.Iterable[bytes]) -> Headers:
+    """Parses multipart headers from an iterable that yields lines (including
+    the trailing newline symbol).  The iterable has to be newline terminated.
+    The iterable will stop at the line where the headers ended so it can be
+    further consumed.
+
+    :param iterable: iterable of newline-terminated ``bytes`` lines
+    """
+    warnings.warn(
+        "'parse_multipart_headers' is deprecated and will be removed in"
+        " Werkzeug 2.1.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    result: t.List[t.Tuple[str, str]] = []
+
+    for b_line in iterable:
+        line = _to_str(b_line)
+        line, line_terminated = _line_parse(line)
+
+        if not line_terminated:
+            raise ValueError("unexpected end of line in multipart header")
+
+        if not line:
+            break
+        elif line[0] in " \t" and result:
+            key, value = result[-1]
+            result[-1] = (key, f"{value}\n {line[1:]}")
+        else:
+            parts = line.split(":", 1)
+
+            if len(parts) == 2:
+                result.append((parts[0].strip(), parts[1].strip()))
+
+    # we link the list to the headers, no need to create a copy, the
+    # list was not shared anyway.
+    return Headers(result)
+
+
+class MultiPartParser:
+    def __init__(
+        self,
+        stream_factory: t.Optional["TStreamFactory"] = None,
+        charset: str = "utf-8",
+        errors: str = "replace",
+        max_form_memory_size: t.Optional[int] = None,
+        cls: t.Optional[t.Type[MultiDict]] = None,
+        buffer_size: int = 64 * 1024,
+    ) -> None:
+        self.charset = charset
+        self.errors = errors
+        self.max_form_memory_size = max_form_memory_size
+
+        if stream_factory is None:
+            stream_factory = default_stream_factory
+
+        self.stream_factory = stream_factory
+
+        if cls is None:
+            cls = MultiDict
+
+        self.cls = cls
+
+        self.buffer_size = buffer_size
+
+    def fail(self, message: str) -> "te.NoReturn":
+        raise ValueError(message)
+
+    def get_part_charset(self, headers: Headers) -> str:
+        # Figure out input charset for current part
+        content_type = headers.get("content-type")
+
+        if content_type:
+            mimetype, ct_params = parse_options_header(content_type)
+            return ct_params.get("charset", self.charset)
+
+        return self.charset
+
+    def start_file_streaming(
+        self, event: File, total_content_length: t.Optional[int]
+    ) -> t.BinaryIO:
+        content_type = event.headers.get("content-type")
+
+        try:
+            content_length = int(event.headers["content-length"])
+        except (KeyError, ValueError):
+            content_length = 0
+
+        container = self.stream_factory(
+            total_content_length=total_content_length,
+            filename=event.filename,
+            content_type=content_type,
+            content_length=content_length,
+        )
+        return container
+
+    def parse(
+        self, stream: t.BinaryIO, boundary: bytes, content_length: t.Optional[int]
+    ) -> t.Tuple[MultiDict, MultiDict]:
+        container: t.Union[t.BinaryIO, t.List[bytes]]
+        _write: t.Callable[[bytes], t.Any]
+
+        iterator = chain(
+            _make_chunk_iter(
+                stream,
+                limit=content_length,
+                buffer_size=self.buffer_size,
+            ),
+            [None],
+        )
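+        # the trailing None chained in above signals end-of-input to the decoder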
+
+        parser = MultipartDecoder(boundary, self.max_form_memory_size)
+
+        fields = []
+        files = []
+
+        current_part: Union[Field, File]
+        for data in iterator:
+            parser.receive_data(data)
+            event = parser.next_event()
+            while not isinstance(event, (Epilogue, NeedData)):
+                if isinstance(event, Field):
+                    current_part = event
+                    container = []
+                    _write = container.append
+                elif isinstance(event, File):
+                    current_part = event
+                    container = self.start_file_streaming(event, content_length)
+                    _write = container.write
+                elif isinstance(event, Data):
+                    _write(event.data)
+                    if not event.more_data:
+                        if isinstance(current_part, Field):
+                            value = b"".join(container).decode(
+                                self.get_part_charset(current_part.headers), self.errors
+                            )
+                            fields.append((current_part.name, value))
+                        else:
+                            container = t.cast(t.BinaryIO, container)
+                            container.seek(0)
+                            files.append(
+                                (
+                                    current_part.name,
+                                    FileStorage(
+                                        container,
+                                        current_part.filename,
+                                        current_part.name,
+                                        headers=current_part.headers,
+                                    ),
+                                )
+                            )
+
+                event = parser.next_event()
+
+        return self.cls(fields), self.cls(files)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/http.py b/venv/lib/python3.7/site-packages/werkzeug/http.py
new file mode 100644
index 00000000..ca48fe21
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/http.py
@@ -0,0 +1,1388 @@
+import base64
+import email.utils
+import re
+import typing
+import typing as t
+import warnings
+from datetime import date
+from datetime import datetime
+from datetime import time
+from datetime import timedelta
+from datetime import timezone
+from enum import Enum
+from hashlib import sha1
+from time import mktime
+from time import struct_time
+from urllib.parse import unquote_to_bytes as _unquote
+from urllib.request import parse_http_list as _parse_list_header
+
+from ._internal import _cookie_parse_impl
+from ._internal import _cookie_quote
+from ._internal import _dt_as_utc
+from ._internal import _make_cookie_domain
+from ._internal import _to_bytes
+from ._internal import _to_str
+from ._internal import _wsgi_decoding_dance
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from _typeshed.wsgi import WSGIEnvironment
+
+# for explanation of "media-range", etc. see Sections 5.3.{1,2} of RFC 7231
+_accept_re = re.compile(
+    r"""
+    (                       # media-range capturing-parenthesis
+      [^\s;,]+              # type/subtype
+      (?:[ \t]*;[ \t]*      # ";"
+        (?:                 # parameter non-capturing-parenthesis
+          [^\s;,q][^\s;,]*  # token that doesn't start with "q"
+        |                   # or
+          q[^\s;,=][^\s;,]* # token that is more than just "q"
+        )
+      )*                    # zero or more parameters
+    )                       # end of media-range
+    (?:[ \t]*;[ \t]*q=      # weight is a "q" parameter
+      (\d*(?:\.\d+)?)       # qvalue capturing-parentheses
+      [^,]*                 # "extension" accept params: who cares?
+    )?                      # accept params are optional
+    """,
+    re.VERBOSE,
+)
+_token_chars = frozenset(
+    "!#$%&'*+-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ^_`abcdefghijklmnopqrstuvwxyz|~"
+)
+_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)')
+_option_header_piece_re = re.compile(
+    r"""
+    ;\s*,?\s*  # newlines were replaced with commas
+    (?P<key>
+        "[^"\\]*(?:\\.[^"\\]*)*"  # quoted string
+    |
+        [^\s;,=*]+  # token
+    )
+    (?:\*(?P<count>\d+))?  # *1, optional continuation index
+    \s*
+    (?:  # optionally followed by =value
+        (?:  # equals sign, possibly with encoding
+            \*\s*=\s*  # * indicates extended notation
+            (?:  # optional encoding
+                (?P<encoding>[^\s]+?)
+                '(?P<language>[^\s]*?)'
+            )?
+        |
+            =\s*  # basic notation
+        )
+        (?P<value>
+            "[^"\\]*(?:\\.[^"\\]*)*"  # quoted string
+        |
+            [^;,]+  # token
+        )?
+    )?
+    \s*
+    """,
+    flags=re.VERBOSE,
+)
+_option_header_start_mime_type = re.compile(r",\s*([^;,\s]+)([;,]\s*.+)?")
+_entity_headers = frozenset(
+    [
+        "allow",
+        "content-encoding",
+        "content-language",
+        "content-length",
+        "content-location",
+        "content-md5",
+        "content-range",
+        "content-type",
+        "expires",
+        "last-modified",
+    ]
+)
+_hop_by_hop_headers = frozenset(
+    [
+        "connection",
+        "keep-alive",
+        "proxy-authenticate",
+        "proxy-authorization",
+        "te",
+        "trailer",
+        "transfer-encoding",
+        "upgrade",
+    ]
+)
+HTTP_STATUS_CODES = {
+    100: "Continue",
+    101: "Switching Protocols",
+    102: "Processing",
+    103: "Early Hints",  # see RFC 8297
+    200: "OK",
+    201: "Created",
+    202: "Accepted",
+    203: "Non Authoritative Information",
+    204: "No Content",
+    205: "Reset Content",
+    206: "Partial Content",
+    207: "Multi Status",
+    208: "Already Reported",  # see RFC 5842
+    226: "IM Used",  # see RFC 3229
+    300: "Multiple Choices",
+    301: "Moved Permanently",
+    302: "Found",
+    303: "See Other",
+    304: "Not Modified",
+    305: "Use Proxy",
+    306: "Switch Proxy",  # unused
+    307: "Temporary Redirect",
+    308: "Permanent Redirect",
+    400: "Bad Request",
+    401: "Unauthorized",
+    402: "Payment Required",  # unused
+    403: "Forbidden",
+    404: "Not Found",
+    405: "Method Not Allowed",
+    406: "Not Acceptable",
+    407: "Proxy Authentication Required",
+    408: "Request Timeout",
+    409: "Conflict",
+    410: "Gone",
+    411: "Length Required",
+    412: "Precondition Failed",
+    413: "Request Entity Too Large",
+    414: "Request URI Too Long",
+    415: "Unsupported Media Type",
+    416: "Requested Range Not Satisfiable",
+    417: "Expectation Failed",
+    418: "I'm a teapot",  # see RFC 2324
+    421: "Misdirected Request",  # see RFC 7540
+    422: "Unprocessable Entity",
+    423: "Locked",
+    424: "Failed Dependency",
+    425: "Too Early",  # see RFC 8470
+    426: "Upgrade Required",
+    428: "Precondition Required",  # see RFC 6585
+    429: "Too Many Requests",
+    431: "Request Header Fields Too Large",
+    449: "Retry With",  # proprietary MS extension
+    451: "Unavailable For Legal Reasons",
+    500: "Internal Server Error",
+    501: "Not Implemented",
+    502: "Bad Gateway",
+    503: "Service Unavailable",
+    504: "Gateway Timeout",
+    505: "HTTP Version Not Supported",
+    506: "Variant Also Negotiates",  # see RFC 2295
+    507: "Insufficient Storage",
+    508: "Loop Detected",  # see RFC 5842
+    510: "Not Extended",
+    511: "Network Authentication Failed",
+}
+
+
+class COEP(Enum):
+    """Cross Origin Embedder Policies"""
+
+    UNSAFE_NONE = "unsafe-none"
+    REQUIRE_CORP = "require-corp"
+
+
+class COOP(Enum):
+    """Cross Origin Opener Policies"""
+
+    UNSAFE_NONE = "unsafe-none"
+    SAME_ORIGIN_ALLOW_POPUPS = "same-origin-allow-popups"
+    SAME_ORIGIN = "same-origin"
+
+
+def quote_header_value(
+    value: t.Union[str, int], extra_chars: str = "", allow_token: bool = True
+) -> str:
+    """Quote a header value if necessary.
+
+    .. versionadded:: 0.5
+
+    :param value: the value to quote.
+    :param extra_chars: a list of extra characters to skip quoting.
+    :param allow_token: if this is enabled token values are returned
+                        unchanged.
+    """
+    if isinstance(value, bytes):
+        value = value.decode("latin1")
+    value = str(value)
+    if allow_token:
+        token_chars = _token_chars | set(extra_chars)
+        if set(value).issubset(token_chars):
+            return value
+    value = value.replace("\\", "\\\\").replace('"', '\\"')
+    return f'"{value}"'
+
+
+def unquote_header_value(value: str, is_filename: bool = False) -> str:
+    r"""Unquotes a header value.  (Reversal of :func:`quote_header_value`).
+    This does not use the real unquoting but what browsers are actually
+    using for quoting.
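+
+    An illustrative sketch:
+
+    >>> unquote_header_value('"hello"')
+    'hello'
+    >>> unquote_header_value("token")
+    'token'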
+
+    .. versionadded:: 0.5
+
+    :param value: the header value to unquote.
+    :param is_filename: The value represents a filename or path.
+    """
+    if value and value[0] == value[-1] == '"':
+        # this is not the real unquoting, but fixing this so that the
+        # RFC is met will result in bugs with internet explorer and
+        # probably some other browsers as well.  IE for example is
+        # uploading files with "C:\foo\bar.txt" as filename
+        value = value[1:-1]
+
+        # if this is a filename and the starting characters look like
+        # a UNC path, then just return the value without quotes.  Using the
+        # replace sequence below on a UNC path has the effect of turning
+        # the leading double slash into a single slash and then
+        # _fix_ie_filename() doesn't work correctly.  See #458.
+        if not is_filename or value[:2] != "\\\\":
+            return value.replace("\\\\", "\\").replace('\\"', '"')
+    return value
+
+
+def dump_options_header(
+    header: t.Optional[str], options: t.Mapping[str, t.Optional[t.Union[str, int]]]
+) -> str:
+    """The reverse function to :func:`parse_options_header`.
+
+    :param header: the header to dump
+    :param options: a dict of options to append.
+    """
+    segments = []
+    if header is not None:
+        segments.append(header)
+    for key, value in options.items():
+        if value is None:
+            segments.append(key)
+        else:
+            segments.append(f"{key}={quote_header_value(value)}")
+    return "; ".join(segments)
+
+
+def dump_header(
+    iterable: t.Union[t.Dict[str, t.Union[str, int]], t.Iterable[str]],
+    allow_token: bool = True,
+) -> str:
+    """Dump an HTTP header again.  This is the reversal of
+    :func:`parse_list_header`, :func:`parse_set_header` and
+    :func:`parse_dict_header`.  This also quotes strings that include an
+    equals sign unless you pass it as dict of key, value pairs.
+
+    >>> dump_header({'foo': 'bar baz'})
+    'foo="bar baz"'
+    >>> dump_header(('foo', 'bar baz'))
+    'foo, "bar baz"'
+
+    :param iterable: the iterable or dict of values to quote.
+    :param allow_token: if set to `False` tokens as values are disallowed.
+                        See :func:`quote_header_value` for more details.
+    """
+    if isinstance(iterable, dict):
+        items = []
+        for key, value in iterable.items():
+            if value is None:
+                items.append(key)
+            else:
+                items.append(
+                    f"{key}={quote_header_value(value, allow_token=allow_token)}"
+                )
+    else:
+        items = [quote_header_value(x, allow_token=allow_token) for x in iterable]
+    return ", ".join(items)
+
+
+def dump_csp_header(header: "ds.ContentSecurityPolicy") -> str:
+    """Dump a Content Security Policy header.
+
+    These are structured into policies such as "default-src 'self';
+    script-src 'self'".
+
+    .. versionadded:: 1.0.0
+       Support for Content Security Policy headers was added.
+
+    """
+    return "; ".join(f"{key} {value}" for key, value in header.items())
+
+
+def parse_list_header(value: str) -> t.List[str]:
+    """Parse lists as described by RFC 2068 Section 2.
+
+    In particular, parse comma-separated lists where the elements of
+    the list may include quoted-strings.  A quoted-string could
+    contain a comma.  A non-quoted string could have quotes in the
+    middle.  Quotes are removed automatically after parsing.
+
+    It works like :func:`parse_set_header`, except that items may
+    appear multiple times and case sensitivity is preserved.
+
+    The return value is a standard :class:`list`:
+
+    >>> parse_list_header('token, "quoted value"')
+    ['token', 'quoted value']
+
+    To create a header from the :class:`list` again, use the
+    :func:`dump_header` function.
+
+    :param value: a string with a list header.
+    :return: :class:`list`
+    """
+    result = []
+    for item in _parse_list_header(value):
+        if item[:1] == item[-1:] == '"':
+            item = unquote_header_value(item[1:-1])
+        result.append(item)
+    return result
+
+
+def parse_dict_header(value: str, cls: t.Type[dict] = dict) -> t.Dict[str, str]:
+    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
+    convert them into a python dict (or any other mapping object created from
+    the type with a dict like interface provided by the `cls` argument):
+
+    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
+    >>> type(d) is dict
+    True
+    >>> sorted(d.items())
+    [('bar', 'as well'), ('foo', 'is a fish')]
+
+    If there is no value for a key it will be `None`:
+
+    >>> parse_dict_header('key_without_value')
+    {'key_without_value': None}
+
+    To create a header from the :class:`dict` again, use the
+    :func:`dump_header` function.
+
+    .. versionchanged:: 0.9
+       Added support for `cls` argument.
+
+    :param value: a string with a dict header.
+    :param cls: callable to use for storage of parsed results.
+    :return: an instance of `cls`
+    """
+    result = cls()
+    if isinstance(value, bytes):
+        value = value.decode("latin1")
+    for item in _parse_list_header(value):
+        if "=" not in item:
+            result[item] = None
+            continue
+        name, value = item.split("=", 1)
+        if value[:1] == value[-1:] == '"':
+            value = unquote_header_value(value[1:-1])
+        result[name] = value
+    return result
+
+
+@typing.overload
+def parse_options_header(
+    value: t.Optional[str], multiple: "te.Literal[False]" = False
+) -> t.Tuple[str, t.Dict[str, str]]:
+    ...
+
+
+@typing.overload
+def parse_options_header(
+    value: t.Optional[str], multiple: "te.Literal[True]"
+) -> t.Tuple[t.Any, ...]:
+    ...
+
+
+def parse_options_header(
+    value: t.Optional[str], multiple: bool = False
+) -> t.Union[t.Tuple[str, t.Dict[str, str]], t.Tuple[t.Any, ...]]:
+    """Parse a ``Content-Type`` like header into a tuple with the content
+    type and the options:
+
+    >>> parse_options_header('text/html; charset=utf8')
+    ('text/html', {'charset': 'utf8'})
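+
+    :rfc:`2231` extended parameters are decoded as well (illustrative):
+
+    >>> parse_options_header("form-data; name*=UTF-8''n%C3%A4me")
+    ('form-data', {'name': 'näme'})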
+
+    This should not be used to parse ``Cache-Control``-like headers that use
+    a slightly different format.  For these headers use the
+    :func:`parse_dict_header` function.
+
+    .. versionchanged:: 0.15
+        :rfc:`2231` parameter continuations are handled.
+
+    .. versionadded:: 0.5
+
+    :param value: the header to parse.
+    :param multiple: Whether to try to parse and return multiple MIME types.
+    :return: (mimetype, options) or (mimetype, options, mimetype, options, …)
+             if multiple=True
+    """
+    if not value:
+        return "", {}
+
+    result: t.List[t.Any] = []
+
+    value = "," + value.replace("\n", ",")
+    while value:
+        match = _option_header_start_mime_type.match(value)
+        if not match:
+            break
+        result.append(match.group(1))  # mimetype
+        options: t.Dict[str, str] = {}
+        # Parse options
+        rest = match.group(2)
+        encoding: t.Optional[str]
+        continued_encoding: t.Optional[str] = None
+        while rest:
+            optmatch = _option_header_piece_re.match(rest)
+            if not optmatch:
+                break
+            option, count, encoding, language, option_value = optmatch.groups()
+            # Continuations don't have to supply the encoding after the
+            # first line. If we're in a continuation, track the current
+            # encoding to use for subsequent lines. Reset it when the
+            # continuation ends.
+            if not count:
+                continued_encoding = None
+            else:
+                if not encoding:
+                    encoding = continued_encoding
+                continued_encoding = encoding
+            option = unquote_header_value(option)
+            if option_value is not None:
+                option_value = unquote_header_value(option_value, option == "filename")
+                if encoding is not None:
+                    option_value = _unquote(option_value).decode(encoding)
+            if count:
+                # Continuations append to the existing value. For
+                # simplicity, this ignores the possibility of
+                # out-of-order indices, which shouldn't happen anyway.
+                options[option] = options.get(option, "") + option_value
+            else:
+                options[option] = option_value
+            rest = rest[optmatch.end() :]
+        result.append(options)
+        if multiple is False:
+            return tuple(result)
+        value = rest
+
+    return tuple(result) if result else ("", {})
+
+
+_TAnyAccept = t.TypeVar("_TAnyAccept", bound="ds.Accept")
+
+
+@typing.overload
+def parse_accept_header(value: t.Optional[str]) -> "ds.Accept":
+    ...
+
+
+@typing.overload
+def parse_accept_header(
+    value: t.Optional[str], cls: t.Type[_TAnyAccept]
+) -> _TAnyAccept:
+    ...
+
+
+def parse_accept_header(
+    value: t.Optional[str], cls: t.Optional[t.Type[_TAnyAccept]] = None
+) -> _TAnyAccept:
+    """Parses an HTTP Accept-* header.  This does not implement a complete
+    valid algorithm but one that supports at least value and quality
+    extraction.
+
+    Returns a new :class:`Accept` object (basically a list of ``(value, quality)``
+    tuples sorted by the quality with some additional accessor methods).
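+
+    An illustrative sketch:
+
+    >>> parse_accept_header("text/html,application/xml;q=0.9")
+    Accept([('text/html', 1), ('application/xml', 0.9)])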
+
+    The second parameter can be a subclass of :class:`Accept` that is created
+    with the parsed values and returned.
+
+    :param value: the accept header string to be parsed.
+    :param cls: the wrapper class for the return value (can be
+                         :class:`Accept` or a subclass thereof)
+    :return: an instance of `cls`.
+    """
+    if cls is None:
+        cls = t.cast(t.Type[_TAnyAccept], ds.Accept)
+
+    if not value:
+        return cls(None)
+
+    result = []
+    for match in _accept_re.finditer(value):
+        quality_match = match.group(2)
+        if not quality_match:
+            quality: float = 1
+        else:
+            quality = max(min(float(quality_match), 1), 0)
+        result.append((match.group(1), quality))
+    return cls(result)
+
+
+_TAnyCC = t.TypeVar("_TAnyCC", bound="ds._CacheControl")
+_t_cc_update = t.Optional[t.Callable[[_TAnyCC], None]]
+
+
+@typing.overload
+def parse_cache_control_header(
+    value: t.Optional[str], on_update: _t_cc_update, cls: None = None
+) -> "ds.RequestCacheControl":
+    ...
+
+
+@typing.overload
+def parse_cache_control_header(
+    value: t.Optional[str], on_update: _t_cc_update, cls: t.Type[_TAnyCC]
+) -> _TAnyCC:
+    ...
+
+
+def parse_cache_control_header(
+    value: t.Optional[str],
+    on_update: _t_cc_update = None,
+    cls: t.Optional[t.Type[_TAnyCC]] = None,
+) -> _TAnyCC:
+    """Parse a cache control header.  The RFC differs between response and
+    request cache control, this method does not.  It's your responsibility
+    to not use the wrong control statements.
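+
+    A rough usage sketch:
+
+    >>> cc = parse_cache_control_header("max-age=3600, no-store")
+    >>> cc.max_age
+    3600
+    >>> cc.no_store
+    True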
+
+    .. versionadded:: 0.5
+       The `cls` was added.  If not specified an immutable
+       :class:`~werkzeug.datastructures.RequestCacheControl` is returned.
+
+    :param value: a cache control header to be parsed.
+    :param on_update: an optional callable that is called every time a value
+                      on the :class:`~werkzeug.datastructures.CacheControl`
+                      object is changed.
+    :param cls: the class for the returned object.  By default
+                :class:`~werkzeug.datastructures.RequestCacheControl` is used.
+    :return: a `cls` object.
+    """
+    if cls is None:
+        cls = t.cast(t.Type[_TAnyCC], ds.RequestCacheControl)
+
+    if not value:
+        return cls((), on_update)
+
+    return cls(parse_dict_header(value), on_update)
+
+
+_TAnyCSP = t.TypeVar("_TAnyCSP", bound="ds.ContentSecurityPolicy")
+_t_csp_update = t.Optional[t.Callable[[_TAnyCSP], None]]
+
+
+@typing.overload
+def parse_csp_header(
+    value: t.Optional[str], on_update: _t_csp_update, cls: None = None
+) -> "ds.ContentSecurityPolicy":
+    ...
+
+
+@typing.overload
+def parse_csp_header(
+    value: t.Optional[str], on_update: _t_csp_update, cls: t.Type[_TAnyCSP]
+) -> _TAnyCSP:
+    ...
+
+
+def parse_csp_header(
+    value: t.Optional[str],
+    on_update: _t_csp_update = None,
+    cls: t.Optional[t.Type[_TAnyCSP]] = None,
+) -> _TAnyCSP:
+    """Parse a Content Security Policy header.
+
+    .. versionadded:: 1.0.0
+       Support for Content Security Policy headers was added.
+
+    :param value: a csp header to be parsed.
+    :param on_update: an optional callable that is called every time a value
+                      on the object is changed.
+    :param cls: the class for the returned object.  By default
+                :class:`~werkzeug.datastructures.ContentSecurityPolicy` is used.
+    :return: a `cls` object.
+    """
+    if cls is None:
+        cls = t.cast(t.Type[_TAnyCSP], ds.ContentSecurityPolicy)
+
+    if value is None:
+        return cls((), on_update)
+
+    items = []
+
+    for policy in value.split(";"):
+        policy = policy.strip()
+
+        # Ignore badly formatted policies (no space)
+        if " " in policy:
+            directive, value = policy.strip().split(" ", 1)
+            items.append((directive.strip(), value.strip()))
+
+    return cls(items, on_update)
+
+
+def parse_set_header(
+    value: t.Optional[str],
+    on_update: t.Optional[t.Callable[["ds.HeaderSet"], None]] = None,
+) -> "ds.HeaderSet":
+    """Parse a set-like header and return a
+    :class:`~werkzeug.datastructures.HeaderSet` object:
+
+    >>> hs = parse_set_header('token, "quoted value"')
+
+    The return value is an object that treats the items case-insensitively
+    and keeps the order of the items:
+
+    >>> 'TOKEN' in hs
+    True
+    >>> hs.index('quoted value')
+    1
+    >>> hs
+    HeaderSet(['token', 'quoted value'])
+
+    To create a header from the :class:`HeaderSet` again, use the
+    :func:`dump_header` function.
+
+    :param value: a set header to be parsed.
+    :param on_update: an optional callable that is called every time a
+                      value on the :class:`~werkzeug.datastructures.HeaderSet`
+                      object is changed.
+    :return: a :class:`~werkzeug.datastructures.HeaderSet`
+    """
+    if not value:
+        return ds.HeaderSet(None, on_update)
+    return ds.HeaderSet(parse_list_header(value), on_update)
+
+
+def parse_authorization_header(
+    value: t.Optional[str],
+) -> t.Optional["ds.Authorization"]:
+    """Parse an HTTP basic/digest authorization header transmitted by the web
+    browser.  The return value is either `None` if the header was invalid or
+    not given, otherwise an :class:`~werkzeug.datastructures.Authorization`
+    object.
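+
+    A minimal sketch for basic auth (``dXNlcjpwYXNz`` is the base64
+    encoding of ``user:pass``):
+
+    >>> auth = parse_authorization_header("Basic dXNlcjpwYXNz")
+    >>> auth.username, auth.password
+    ('user', 'pass')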
+
+    :param value: the authorization header to parse.
+    :return: a :class:`~werkzeug.datastructures.Authorization` object or `None`.
+    """
+    if not value:
+        return None
+    value = _wsgi_decoding_dance(value)
+    try:
+        auth_type, auth_info = value.split(None, 1)
+        auth_type = auth_type.lower()
+    except ValueError:
+        return None
+    if auth_type == "basic":
+        try:
+            username, password = base64.b64decode(auth_info).split(b":", 1)
+        except Exception:
+            return None
+        try:
+            return ds.Authorization(
+                "basic",
+                {
+                    "username": _to_str(username, "utf-8"),
+                    "password": _to_str(password, "utf-8"),
+                },
+            )
+        except UnicodeDecodeError:
+            return None
+    elif auth_type == "digest":
+        auth_map = parse_dict_header(auth_info)
+        for key in "username", "realm", "nonce", "uri", "response":
+            if key not in auth_map:
+                return None
+        if "qop" in auth_map:
+            if not auth_map.get("nc") or not auth_map.get("cnonce"):
+                return None
+        return ds.Authorization("digest", auth_map)
+    return None
+
+
+def parse_www_authenticate_header(
+    value: t.Optional[str],
+    on_update: t.Optional[t.Callable[["ds.WWWAuthenticate"], None]] = None,
+) -> "ds.WWWAuthenticate":
+    """Parse an HTTP WWW-Authenticate header into a
+    :class:`~werkzeug.datastructures.WWWAuthenticate` object.
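+
+    For example (illustrative):
+
+    >>> parse_www_authenticate_header('Basic realm="admin"').realm
+    'admin'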
+
+    :param value: a WWW-Authenticate header to parse.
+    :param on_update: an optional callable that is called every time a value
+                      on the :class:`~werkzeug.datastructures.WWWAuthenticate`
+                      object is changed.
+    :return: a :class:`~werkzeug.datastructures.WWWAuthenticate` object.
+    """
+    if not value:
+        return ds.WWWAuthenticate(on_update=on_update)
+    try:
+        auth_type, auth_info = value.split(None, 1)
+        auth_type = auth_type.lower()
+    except (ValueError, AttributeError):
+        return ds.WWWAuthenticate(value.strip().lower(), on_update=on_update)
+    return ds.WWWAuthenticate(auth_type, parse_dict_header(auth_info), on_update)
+
+
+def parse_if_range_header(value: t.Optional[str]) -> "ds.IfRange":
+    """Parses an if-range header which can be an etag or a date.  Returns
+    a :class:`~werkzeug.datastructures.IfRange` object.
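+
+    For example, an etag value (illustrative):
+
+    >>> parse_if_range_header('"abc"').etag
+    'abc'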
+
+    .. versionchanged:: 2.0
+        If the value represents a datetime, it is timezone-aware.
+
+    .. versionadded:: 0.7
+    """
+    if not value:
+        return ds.IfRange()
+    date = parse_date(value)
+    if date is not None:
+        return ds.IfRange(date=date)
+    # drop weakness information
+    return ds.IfRange(unquote_etag(value)[0])
+
+
+def parse_range_header(
+    value: t.Optional[str], make_inclusive: bool = True
+) -> t.Optional["ds.Range"]:
+    """Parses a range header into a :class:`~werkzeug.datastructures.Range`
+    object.  If the header is missing or malformed `None` is returned.
+    `ranges` is a list of ``(start, stop)`` tuples where ``stop`` is
+    exclusive.
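+
+    For example (illustrative):
+
+    >>> parse_range_header("bytes=0-499").ranges
+    [(0, 500)]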
+
+    .. versionadded:: 0.7
+    """
+    if not value or "=" not in value:
+        return None
+
+    ranges = []
+    last_end = 0
+    units, rng = value.split("=", 1)
+    units = units.strip().lower()
+
+    for item in rng.split(","):
+        item = item.strip()
+        if "-" not in item:
+            return None
+        if item.startswith("-"):
+            if last_end < 0:
+                return None
+            try:
+                begin = int(item)
+            except ValueError:
+                return None
+            end = None
+            last_end = -1
+        elif "-" in item:
+            begin_str, end_str = item.split("-", 1)
+            begin_str = begin_str.strip()
+            end_str = end_str.strip()
+            if not begin_str.isdigit():
+                return None
+            begin = int(begin_str)
+            if begin < last_end or last_end < 0:
+                return None
+            if end_str:
+                if not end_str.isdigit():
+                    return None
+                end = int(end_str) + 1
+                if begin >= end:
+                    return None
+            else:
+                end = None
+            last_end = end if end is not None else -1
+        ranges.append((begin, end))
+
+    return ds.Range(units, ranges)
+
+
+def parse_content_range_header(
+    value: t.Optional[str],
+    on_update: t.Optional[t.Callable[["ds.ContentRange"], None]] = None,
+) -> t.Optional["ds.ContentRange"]:
+    """Parses a range header into a
+    :class:`~werkzeug.datastructures.ContentRange` object or `None` if
+    parsing is not possible.
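+
+    For example (``stop`` is exclusive; illustrative):
+
+    >>> rv = parse_content_range_header("bytes 0-499/1000")
+    >>> (rv.start, rv.stop, rv.length)
+    (0, 500, 1000)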
+
+    .. versionadded:: 0.7
+
+    :param value: a content range header to be parsed.
+    :param on_update: an optional callable that is called every time a value
+                      on the :class:`~werkzeug.datastructures.ContentRange`
+                      object is changed.
+    """
+    if value is None:
+        return None
+    try:
+        units, rangedef = (value or "").strip().split(None, 1)
+    except ValueError:
+        return None
+
+    if "/" not in rangedef:
+        return None
+    rng, length_str = rangedef.split("/", 1)
+    if length_str == "*":
+        length = None
+    elif length_str.isdigit():
+        length = int(length_str)
+    else:
+        return None
+
+    if rng == "*":
+        return ds.ContentRange(units, None, None, length, on_update=on_update)
+    elif "-" not in rng:
+        return None
+
+    start_str, stop_str = rng.split("-", 1)
+    try:
+        start = int(start_str)
+        stop = int(stop_str) + 1
+    except ValueError:
+        return None
+
+    if is_byte_range_valid(start, stop, length):
+        return ds.ContentRange(units, start, stop, length, on_update=on_update)
+
+    return None
+
+
+def quote_etag(etag: str, weak: bool = False) -> str:
+    """Quote an etag.
+
+    :param etag: the etag to quote.
+    :param weak: set to `True` to tag it "weak".
+    """
+    if '"' in etag:
+        raise ValueError("invalid etag")
+    etag = f'"{etag}"'
+    if weak:
+        etag = f"W/{etag}"
+    return etag
+
+
+def unquote_etag(
+    etag: t.Optional[str],
+) -> t.Union[t.Tuple[str, bool], t.Tuple[None, None]]:
+    """Unquote a single etag:
+
+    >>> unquote_etag('W/"bar"')
+    ('bar', True)
+    >>> unquote_etag('"bar"')
+    ('bar', False)
+
+    :param etag: the etag identifier to unquote.
+    :return: a ``(etag, weak)`` tuple.
+    """
+    if not etag:
+        return None, None
+    etag = etag.strip()
+    weak = False
+    if etag.startswith(("W/", "w/")):
+        weak = True
+        etag = etag[2:]
+    if etag[:1] == etag[-1:] == '"':
+        etag = etag[1:-1]
+    return etag, weak
+
+
+def parse_etags(value: t.Optional[str]) -> "ds.ETags":
+    """Parse an etag header.
+
+    :param value: the tag header to parse
+    :return: an :class:`~werkzeug.datastructures.ETags` object.
+    """
+    if not value:
+        return ds.ETags()
+    strong = []
+    weak = []
+    end = len(value)
+    pos = 0
+    while pos < end:
+        match = _etag_re.match(value, pos)
+        if match is None:
+            break
+        is_weak, quoted, raw = match.groups()
+        if raw == "*":
+            return ds.ETags(star_tag=True)
+        elif quoted:
+            raw = quoted
+        if is_weak:
+            weak.append(raw)
+        else:
+            strong.append(raw)
+        pos = match.end()
+    return ds.ETags(strong, weak)
+
+
+def generate_etag(data: bytes) -> str:
+    """Generate an etag for some data.
+
+    .. versionchanged:: 2.0
+        Use SHA-1. MD5 may not be available in some environments.
+    """
+    return sha1(data).hexdigest()
+
+
+def parse_date(value: t.Optional[str]) -> t.Optional[datetime]:
+    """Parse an :rfc:`2822` date into a timezone-aware
+    :class:`datetime.datetime` object, or ``None`` if parsing fails.
+
+    This is a wrapper for :func:`email.utils.parsedate_to_datetime`. It
+    returns ``None`` if parsing fails instead of raising an exception,
+    and always returns a timezone-aware datetime object. If the string
+    doesn't have timezone information, it is assumed to be UTC.
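+
+    For example (illustrative):
+
+    >>> parse_date("Sun, 06 Nov 1994 08:49:37 GMT")
+    datetime.datetime(1994, 11, 6, 8, 49, 37, tzinfo=datetime.timezone.utc)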
+
+    :param value: A string with a supported date format.
+
+    .. versionchanged:: 2.0
+        Return a timezone-aware datetime object. Use
+        ``email.utils.parsedate_to_datetime``.
+    """
+    if value is None:
+        return None
+
+    try:
+        dt = email.utils.parsedate_to_datetime(value)
+    except (TypeError, ValueError):
+        return None
+
+    if dt.tzinfo is None:
+        return dt.replace(tzinfo=timezone.utc)
+
+    return dt
+
+
+def cookie_date(
+    expires: t.Optional[t.Union[datetime, date, int, float, struct_time]] = None
+) -> str:
+    """Format a datetime object or timestamp into an :rfc:`2822` date
+    string for ``Set-Cookie expires``.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use :func:`http_date` instead.
+    """
+    warnings.warn(
+        "'cookie_date' is deprecated and will be removed in Werkzeug"
+        " 2.1. Use 'http_date' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return http_date(expires)
+
+
+def http_date(
+    timestamp: t.Optional[t.Union[datetime, date, int, float, struct_time]] = None
+) -> str:
+    """Format a datetime object or timestamp into an :rfc:`2822` date
+    string.
+
+    This is a wrapper for :func:`email.utils.format_datetime`. It
+    assumes naive datetime objects are in UTC instead of raising an
+    exception.
+
+    :param timestamp: The datetime or timestamp to format. Defaults to
+        the current time.
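+
+    For example (illustrative):
+
+    >>> from datetime import datetime, timezone
+    >>> http_date(datetime(2021, 5, 24, tzinfo=timezone.utc))
+    'Mon, 24 May 2021 00:00:00 GMT'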
+
+    .. versionchanged:: 2.0
+        Use ``email.utils.format_datetime``. Accept ``date`` objects.
+    """
+    if isinstance(timestamp, date):
+        if not isinstance(timestamp, datetime):
+            # Assume plain date is midnight UTC.
+            timestamp = datetime.combine(timestamp, time(), tzinfo=timezone.utc)
+        else:
+            # Ensure datetime is timezone-aware.
+            timestamp = _dt_as_utc(timestamp)
+
+        return email.utils.format_datetime(timestamp, usegmt=True)
+
+    if isinstance(timestamp, struct_time):
+        timestamp = mktime(timestamp)
+
+    return email.utils.formatdate(timestamp, usegmt=True)
+
+
+def parse_age(value: t.Optional[str] = None) -> t.Optional[timedelta]:
+    """Parses a base-10 integer count of seconds into a timedelta.
+
+    If parsing fails, the return value is `None`.
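+
+    For example (illustrative):
+
+    >>> parse_age("3600")
+    datetime.timedelta(seconds=3600)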
+
+    :param value: a string consisting of an integer represented in base-10
+    :return: a :class:`datetime.timedelta` object or `None`.
+    """
+    if not value:
+        return None
+    try:
+        seconds = int(value)
+    except ValueError:
+        return None
+    if seconds < 0:
+        return None
+    try:
+        return timedelta(seconds=seconds)
+    except OverflowError:
+        return None
+
+
+def dump_age(age: t.Optional[t.Union[timedelta, int]] = None) -> t.Optional[str]:
+    """Formats the duration as a base-10 integer.
+
+    :param age: should be an integer number of seconds,
+                a :class:`datetime.timedelta` object, or,
+                if the age is unknown, `None` (default).
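+
+    For example (illustrative):
+
+    >>> from datetime import timedelta
+    >>> dump_age(timedelta(minutes=1))
+    '60'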
+    """
+    if age is None:
+        return None
+    if isinstance(age, timedelta):
+        age = int(age.total_seconds())
+    else:
+        age = int(age)
+
+    if age < 0:
+        raise ValueError("age cannot be negative")
+
+    return str(age)
+
+
+def is_resource_modified(
+    environ: "WSGIEnvironment",
+    etag: t.Optional[str] = None,
+    data: t.Optional[bytes] = None,
+    last_modified: t.Optional[t.Union[datetime, str]] = None,
+    ignore_if_range: bool = True,
+) -> bool:
+    """Convenience method for conditional requests.
+
+    :param environ: the WSGI environment of the request to be checked.
+    :param etag: the etag for the response for comparison.
+    :param data: or alternatively the data of the response to automatically
+                 generate an etag using :func:`generate_etag`.
+    :param last_modified: an optional date of the last modification.
+    :param ignore_if_range: If `False`, `If-Range` header will be taken into
+                            account.
+    :return: `True` if the resource was modified, otherwise `False`.
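+
+    A minimal sketch with a conditional ``If-None-Match`` request:
+
+    >>> env = {"HTTP_IF_NONE_MATCH": '"abc"'}
+    >>> is_resource_modified(env, etag="abc")
+    False
+    >>> is_resource_modified(env, etag="xyz")
+    True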
+
+    .. versionchanged:: 2.0
+        SHA-1 is used to generate an etag value for the data. MD5 may
+        not be available in some environments.
+
+    .. versionchanged:: 1.0.0
+        The check is run for methods other than ``GET`` and ``HEAD``.
+    """
+    if etag is None and data is not None:
+        etag = generate_etag(data)
+    elif data is not None:
+        raise TypeError("both data and etag given")
+
+    unmodified = False
+    if isinstance(last_modified, str):
+        last_modified = parse_date(last_modified)
+
+    # HTTP doesn't use microseconds; remove them to avoid false positive
+    # comparisons. Mark naive datetimes as UTC.
+    if last_modified is not None:
+        last_modified = _dt_as_utc(last_modified.replace(microsecond=0))
+
+    if_range = None
+    if not ignore_if_range and "HTTP_RANGE" in environ:
+        # https://tools.ietf.org/html/rfc7233#section-3.2
+        # A server MUST ignore an If-Range header field received in a request
+        # that does not contain a Range header field.
+        if_range = parse_if_range_header(environ.get("HTTP_IF_RANGE"))
+
+    if if_range is not None and if_range.date is not None:
+        modified_since: t.Optional[datetime] = if_range.date
+    else:
+        modified_since = parse_date(environ.get("HTTP_IF_MODIFIED_SINCE"))
+
+    if modified_since and last_modified and last_modified <= modified_since:
+        unmodified = True
+
+    if etag:
+        etag, _ = unquote_etag(etag)
+        etag = t.cast(str, etag)
+
+        if if_range is not None and if_range.etag is not None:
+            unmodified = parse_etags(if_range.etag).contains(etag)
+        else:
+            if_none_match = parse_etags(environ.get("HTTP_IF_NONE_MATCH"))
+            if if_none_match:
+                # https://tools.ietf.org/html/rfc7232#section-3.2
+                # "A recipient MUST use the weak comparison function when comparing
+                # entity-tags for If-None-Match"
+                unmodified = if_none_match.contains_weak(etag)
+
+            # https://tools.ietf.org/html/rfc7232#section-3.1
+            # "Origin server MUST use the strong comparison function when
+            # comparing entity-tags for If-Match"
+            if_match = parse_etags(environ.get("HTTP_IF_MATCH"))
+            if if_match:
+                unmodified = not if_match.is_strong(etag)
+
+    return not unmodified
+
+
+def remove_entity_headers(
+    headers: t.Union["ds.Headers", t.List[t.Tuple[str, str]]],
+    allowed: t.Iterable[str] = ("expires", "content-location"),
+) -> None:
+    """Remove all entity headers from a list or :class:`Headers` object.  This
+    operation works in-place.  `Expires` and `Content-Location` headers are
+    by default not removed.  The reason for this is :rfc:`2616` section
+    10.3.5 which specifies some entity headers that should be sent.
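+
+    For example (illustrative):
+
+    >>> headers = [("Content-Type", "text/html"), ("X-Foo", "bar")]
+    >>> remove_entity_headers(headers)
+    >>> headers
+    [('X-Foo', 'bar')]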
+
+    .. versionchanged:: 0.5
+       added `allowed` parameter.
+
+    :param headers: a list or :class:`Headers` object.
+    :param allowed: a list of headers that should still be allowed even though
+                    they are entity headers.
+    """
+    allowed = {x.lower() for x in allowed}
+    headers[:] = [
+        (key, value)
+        for key, value in headers
+        if not is_entity_header(key) or key.lower() in allowed
+    ]
+
+
+def remove_hop_by_hop_headers(
+    headers: t.Union["ds.Headers", t.List[t.Tuple[str, str]]]
+) -> None:
+    """Remove all HTTP/1.1 "Hop-by-Hop" headers from a list or
+    :class:`Headers` object.  This operation works in-place.
+
+    .. versionadded:: 0.5
+
+    :param headers: a list or :class:`Headers` object.
+    """
+    headers[:] = [
+        (key, value) for key, value in headers if not is_hop_by_hop_header(key)
+    ]
+
+
+def is_entity_header(header: str) -> bool:
+    """Check if a header is an entity header.
+
+    .. versionadded:: 0.5
+
+    :param header: the header to test.
+    :return: `True` if it's an entity header, `False` otherwise.
+    """
+    return header.lower() in _entity_headers
+
+
+def is_hop_by_hop_header(header: str) -> bool:
+    """Check if a header is an HTTP/1.1 "Hop-by-Hop" header.
+
+    .. versionadded:: 0.5
+
+    :param header: the header to test.
+    :return: `True` if it's an HTTP/1.1 "Hop-by-Hop" header, `False` otherwise.
+    """
+    return header.lower() in _hop_by_hop_headers
+
+
+def parse_cookie(
+    header: t.Union["WSGIEnvironment", str, bytes, None],
+    charset: str = "utf-8",
+    errors: str = "replace",
+    cls: t.Optional[t.Type["ds.MultiDict"]] = None,
+) -> "ds.MultiDict[str, str]":
+    """Parse a cookie from a string or WSGI environ.
+
+    The same key can be provided multiple times, the values are stored
+    in-order. The default :class:`MultiDict` will have the first value
+    first, and all values can be retrieved with
+    :meth:`MultiDict.getlist`.
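+
+    For example (illustrative):
+
+    >>> cookies = parse_cookie("a=b; a=c")
+    >>> cookies["a"]
+    'b'
+    >>> cookies.getlist("a")
+    ['b', 'c']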
+
+    :param header: The cookie header as a string, or a WSGI environ dict
+        with a ``HTTP_COOKIE`` key.
+    :param charset: The charset for the cookie values.
+    :param errors: The error behavior for the charset decoding.
+    :param cls: A dict-like class to store the parsed cookies in.
+        Defaults to :class:`MultiDict`.
+
+    .. versionchanged:: 1.0.0
+        Returns a :class:`MultiDict` instead of a
+        ``TypeConversionDict``.
+
+    .. versionchanged:: 0.5
+       Returns a :class:`TypeConversionDict` instead of a regular dict.
+       The ``cls`` parameter was added.
+    """
+    if isinstance(header, dict):
+        header = header.get("HTTP_COOKIE", "")
+    elif header is None:
+        header = ""
+
+    # PEP 3333 sends headers through the environ as latin1 decoded
+    # strings. Encode strings back to bytes for parsing.
+    if isinstance(header, str):
+        header = header.encode("latin1", "replace")
+
+    if cls is None:
+        cls = ds.MultiDict
+
+    def _parse_pairs() -> t.Iterator[t.Tuple[str, str]]:
+        for key, val in _cookie_parse_impl(header):  # type: ignore
+            key_str = _to_str(key, charset, errors, allow_none_charset=True)
+
+            if not key_str:
+                continue
+
+            val_str = _to_str(val, charset, errors, allow_none_charset=True)
+            yield key_str, val_str
+
+    return cls(_parse_pairs())
+
+
+def dump_cookie(
+    key: str,
+    value: t.Union[bytes, str] = "",
+    max_age: t.Optional[t.Union[timedelta, int]] = None,
+    expires: t.Optional[t.Union[str, datetime, int, float]] = None,
+    path: t.Optional[str] = "/",
+    domain: t.Optional[str] = None,
+    secure: bool = False,
+    httponly: bool = False,
+    charset: str = "utf-8",
+    sync_expires: bool = True,
+    max_size: int = 4093,
+    samesite: t.Optional[str] = None,
+) -> str:
+    """Create a Set-Cookie header without the ``Set-Cookie`` prefix.
+
+    The return value is usually restricted to ASCII as the vast majority
+    of values are properly escaped, but that is no guarantee. It's
+    tunneled through latin1 as required by :pep:`3333`.
+
+    The return value is not ASCII safe if the key contains unicode
+    characters.  This is technically against the specification but
+    happens in the wild.  It's strongly recommended to not use
+    non-ASCII values for the keys.
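+
+    A quick sketch of the output shape:
+
+    >>> dump_cookie("session", "abc", httponly=True)
+    'session=abc; HttpOnly; Path=/'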
+
+    :param max_age: should be a number of seconds, or `None` (default) if
+                    the cookie should last only as long as the client's
+                    browser session.  Additionally `timedelta` objects
+                    are accepted, too.
+    :param expires: should be a `datetime` object or unix timestamp.
+    :param path: limits the cookie to a given path, per default it will
+                 span the whole domain.
+    :param domain: Use this if you want to set a cross-domain cookie. For
+                   example, ``domain=".example.com"`` will set a cookie
+                   that is readable by the domain ``www.example.com``,
+                   ``foo.example.com`` etc. Otherwise, a cookie will only
+                   be readable by the domain that set it.
+    :param secure: The cookie will only be available via HTTPS
+    :param httponly: disallow JavaScript to access the cookie.  This is an
+                     extension to the cookie standard and probably not
+                     supported by all browsers.
+    :param charset: the encoding for string values.
+    :param sync_expires: automatically set expires if max_age is defined
+                         but expires not.
+    :param max_size: Warn if the final header value exceeds this size. The
+        default, 4093, should be safely `supported by most browsers
+        <cookie_>`_. Set to 0 to disable this check.
+    :param samesite: Limits the scope of the cookie such that it will
+        only be attached to requests if those requests are same-site.
+
+    .. _`cookie`: http://browsercookielimits.squawky.net/
+
+    .. versionchanged:: 1.0.0
+        The string ``'None'`` is accepted for ``samesite``.
+    """
+    key = _to_bytes(key, charset)
+    value = _to_bytes(value, charset)
+
+    if path is not None:
+        from .urls import iri_to_uri
+
+        path = iri_to_uri(path, charset)
+
+    domain = _make_cookie_domain(domain)
+
+    if isinstance(max_age, timedelta):
+        max_age = int(max_age.total_seconds())
+
+    if expires is not None:
+        if not isinstance(expires, str):
+            expires = http_date(expires)
+    elif max_age is not None and sync_expires:
+        expires = http_date(datetime.now(tz=timezone.utc).timestamp() + max_age)
+
+    if samesite is not None:
+        samesite = samesite.title()
+
+        if samesite not in {"Strict", "Lax", "None"}:
+            raise ValueError("SameSite must be 'Strict', 'Lax', or 'None'.")
+
+    buf = [key + b"=" + _cookie_quote(value)]
+
+    # XXX: In theory all of these parameters that are not marked with `None`
+    # should be quoted.  Because stdlib did not quote it before I did not
+    # want to introduce quoting there now.
+    for k, v, q in (
+        (b"Domain", domain, True),
+        (b"Expires", expires, False),
+        (b"Max-Age", max_age, False),
+        (b"Secure", secure, None),
+        (b"HttpOnly", httponly, None),
+        (b"Path", path, False),
+        (b"SameSite", samesite, False),
+    ):
+        if q is None:
+            if v:
+                buf.append(k)
+            continue
+
+        if v is None:
+            continue
+
+        tmp = bytearray(k)
+        if not isinstance(v, (bytes, bytearray)):
+            v = _to_bytes(str(v), charset)
+        if q:
+            v = _cookie_quote(v)
+        tmp += b"=" + v
+        buf.append(bytes(tmp))
+
+    # The return value will be an incorrectly encoded latin1 header for
+    # consistency with the headers object.
+    rv = b"; ".join(buf)
+    rv = rv.decode("latin1")
+
+    # Warn if the final value of the cookie is larger than the limit. If the
+    # cookie is too large, then it may be silently ignored by the browser,
+    # which can be quite hard to debug.
+    cookie_size = len(rv)
+
+    if max_size and cookie_size > max_size:
+        value_size = len(value)
+        warnings.warn(
+            f"The {key.decode(charset)!r} cookie is too large: the value was"
+            f" {value_size} bytes but the"
+            f" header required {cookie_size - value_size} extra bytes. The final size"
+            f" was {cookie_size} bytes but the limit is {max_size} bytes. Browsers may"
+            f" silently ignore cookies larger than this.",
+            stacklevel=2,
+        )
+
+    return rv
+
+
+def is_byte_range_valid(
+    start: t.Optional[int], stop: t.Optional[int], length: t.Optional[int]
+) -> bool:
+    """Checks if a given byte content range is valid for the given length.
+
+    .. versionadded:: 0.7
+    """
+    if (start is None) != (stop is None):
+        return False
+    elif start is None:
+        return length is None or length >= 0
+    elif length is None:
+        return 0 <= start < stop  # type: ignore
+    elif start >= stop:  # type: ignore
+        return False
+    return 0 <= start < length
+
+
+# circular dependencies
+from . import datastructures as ds
diff --git a/venv/lib/python3.7/site-packages/werkzeug/local.py b/venv/lib/python3.7/site-packages/werkzeug/local.py
new file mode 100644
index 00000000..a5a7870e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/local.py
@@ -0,0 +1,666 @@
+import copy
+import math
+import operator
+import sys
+import typing as t
+import warnings
+from functools import partial
+from functools import update_wrapper
+
+from .wsgi import ClosingIterator
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+try:
+    from greenlet import getcurrent as _get_ident
+except ImportError:
+    from threading import get_ident as _get_ident
+
+
+def get_ident() -> int:
+    warnings.warn(
+        "'get_ident' is deprecated and will be removed in Werkzeug"
+        " 2.1. Use 'greenlet.getcurrent' or 'threading.get_ident' for"
+        " previous behavior.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return _get_ident()  # type: ignore
+
+
+class _CannotUseContextVar(Exception):
+    pass
+
+
+try:
+    from contextvars import ContextVar
+
+    if "gevent" in sys.modules or "eventlet" in sys.modules:
+        # Both use greenlet, so first check it has patched
+        # ContextVars, Greenlet <0.4.17 does not.
+        import greenlet
+
+        greenlet_patched = getattr(greenlet, "GREENLET_USE_CONTEXT_VARS", False)
+
+        if not greenlet_patched:
+            # If Gevent is used, check it has patched ContextVars,
+            # <20.5 does not.
+            try:
+                from gevent.monkey import is_object_patched
+            except ImportError:
+                # Gevent isn't used, but Greenlet is and hasn't patched
+                raise _CannotUseContextVar()
+            else:
+                if is_object_patched("threading", "local") and not is_object_patched(
+                    "contextvars", "ContextVar"
+                ):
+                    raise _CannotUseContextVar()
+
+
+except (ImportError, _CannotUseContextVar):
+
+    class ContextVar:  # type: ignore
+        """A fake ContextVar based on the previous greenlet/threading
+        ident function. Used on Python 3.6, eventlet, and old versions
+        of gevent.
+        """
+
+        def __init__(self, _name: str) -> None:
+            self.storage: t.Dict[int, t.Dict[str, t.Any]] = {}
+
+        def get(self, default: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
+            return self.storage.get(_get_ident(), default)
+
+        def set(self, value: t.Dict[str, t.Any]) -> None:
+            self.storage[_get_ident()] = value
+
+
+def release_local(local: t.Union["Local", "LocalStack"]) -> None:
+    """Releases the contents of the local for the current context.
+    This makes it possible to use locals without a manager.
+
+    Example::
+
+        >>> loc = Local()
+        >>> loc.foo = 42
+        >>> release_local(loc)
+        >>> hasattr(loc, 'foo')
+        False
+
+    With this function one can release :class:`Local` objects as well
+    as :class:`LocalStack` objects.  However, it is not possible to
+    release data held by proxies that way; one always has to retain
+    a reference to the underlying local object in order to be able
+    to release it.
+
+    .. versionadded:: 0.6.1
+    """
+    local.__release_local__()
+
+
+class Local:
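+    """A namespace whose attributes are isolated per context (thread,
+    greenlet, or async task).  A rough usage sketch:
+
+    >>> loc = Local()
+    >>> loc.user = "admin"
+    >>> loc.user
+    'admin'
+    """
+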
+    __slots__ = ("_storage",)
+
+    def __init__(self) -> None:
+        object.__setattr__(self, "_storage", ContextVar("local_storage"))
+
+    @property
+    def __storage__(self) -> t.Dict[str, t.Any]:
+        warnings.warn(
+            "'__storage__' is deprecated and will be removed in Werkzeug 2.1.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self._storage.get({})  # type: ignore
+
+    @property
+    def __ident_func__(self) -> t.Callable[[], int]:
+        warnings.warn(
+            "'__ident_func__' is deprecated and will be removed in"
+            " Werkzeug 2.1. It should not be used in Python 3.7+.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return _get_ident  # type: ignore
+
+    @__ident_func__.setter
+    def __ident_func__(self, func: t.Callable[[], int]) -> None:
+        warnings.warn(
+            "'__ident_func__' is deprecated and will be removed in"
+            " Werkzeug 2.1. Setting it no longer has any effect.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+    def __iter__(self) -> t.Iterator[t.Tuple[int, t.Any]]:
+        return iter(self._storage.get({}).items())
+
+    def __call__(self, proxy: str) -> "LocalProxy":
+        """Create a proxy for a name."""
+        return LocalProxy(self, proxy)
+
+    def __release_local__(self) -> None:
+        self._storage.set({})
+
+    def __getattr__(self, name: str) -> t.Any:
+        values = self._storage.get({})
+        try:
+            return values[name]
+        except KeyError:
+            raise AttributeError(name)
+
+    def __setattr__(self, name: str, value: t.Any) -> None:
+        values = self._storage.get({}).copy()
+        values[name] = value
+        self._storage.set(values)
+
+    def __delattr__(self, name: str) -> None:
+        values = self._storage.get({}).copy()
+        try:
+            del values[name]
+            self._storage.set(values)
+        except KeyError:
+            raise AttributeError(name)
+
+
+class LocalStack:
+    """This class works similar to a :class:`Local` but keeps a stack
+    of objects instead.  This is best explained with an example::
+
+        >>> ls = LocalStack()
+        >>> ls.push(42)
+        >>> ls.top
+        42
+        >>> ls.push(23)
+        >>> ls.top
+        23
+        >>> ls.pop()
+        23
+        >>> ls.top
+        42
+
+    They can be force-released by using a :class:`LocalManager` or with
+    the :func:`release_local` function, but the correct way is to pop the
+    item from the stack after use.  When the stack is empty it will
+    no longer be bound to the current context (and as such released).
+
+    Calling the stack without arguments returns a proxy that resolves to
+    the topmost item on the stack.
+
+    .. versionadded:: 0.6.1
+    """
+
+    def __init__(self) -> None:
+        self._local = Local()
+
+    def __release_local__(self) -> None:
+        self._local.__release_local__()
+
+    @property
+    def __ident_func__(self) -> t.Callable[[], int]:
+        return self._local.__ident_func__
+
+    @__ident_func__.setter
+    def __ident_func__(self, value: t.Callable[[], int]) -> None:
+        object.__setattr__(self._local, "__ident_func__", value)
+
+    def __call__(self) -> "LocalProxy":
+        def _lookup() -> t.Any:
+            rv = self.top
+            if rv is None:
+                raise RuntimeError("object unbound")
+            return rv
+
+        return LocalProxy(_lookup)
+
+    def push(self, obj: t.Any) -> t.List[t.Any]:
+        """Pushes a new item to the stack"""
+        rv = getattr(self._local, "stack", []).copy()
+        rv.append(obj)
+        self._local.stack = rv
+        return rv  # type: ignore
+
+    def pop(self) -> t.Any:
+        """Removes the topmost item from the stack, will return the
+        old value or `None` if the stack was already empty.
+        """
+        stack = getattr(self._local, "stack", None)
+        if stack is None:
+            return None
+        elif len(stack) == 1:
+            release_local(self._local)
+            return stack[-1]
+        else:
+            return stack.pop()
+
+    @property
+    def top(self) -> t.Any:
+        """The topmost item on the stack.  If the stack is empty,
+        `None` is returned.
+        """
+        try:
+            return self._local.stack[-1]
+        except (AttributeError, IndexError):
+            return None
+
+
+class LocalManager:
+    """Local objects cannot manage themselves. For that you need a local
+    manager. You can pass a local manager multiple locals or add them
+    later by appending them to `manager.locals`. Every time the manager
+    cleans up, it will clean up all the data left in the locals for this
+    context.
+
+    .. versionchanged:: 2.0
+        ``ident_func`` is deprecated and will be removed in Werkzeug
+        2.1.
+
+    .. versionchanged:: 0.6.1
+        The :func:`release_local` function can be used instead of a
+        manager.
+
+    .. versionchanged:: 0.7
+        The ``ident_func`` parameter was added.
+    """
+
+    def __init__(
+        self,
+        locals: t.Optional[t.Iterable[t.Union[Local, LocalStack]]] = None,
+        ident_func: None = None,
+    ) -> None:
+        if locals is None:
+            self.locals = []
+        elif isinstance(locals, Local):
+            self.locals = [locals]
+        else:
+            self.locals = list(locals)
+
+        if ident_func is not None:
+            warnings.warn(
+                "'ident_func' is deprecated and will be removed in"
+                " Werkzeug 2.1. Setting it no longer has any effect.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+    @property
+    def ident_func(self) -> t.Callable[[], int]:
+        warnings.warn(
+            "'ident_func' is deprecated and will be removed in Werkzeug 2.1.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return _get_ident  # type: ignore
+
+    @ident_func.setter
+    def ident_func(self, func: t.Callable[[], int]) -> None:
+        warnings.warn(
+            "'ident_func' is deprecated and will be removedin Werkzeug"
+            " 2.1. Setting it no longer has any effect.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+    def get_ident(self) -> int:
+        """Return the context identifier the local objects use internally for
+        this context.  You cannot override this method to change the behavior
+        but use it to link other context local objects (such as SQLAlchemy's
+        scoped sessions) to the Werkzeug locals.
+
+        .. deprecated:: 2.0
+            Will be removed in Werkzeug 2.1.
+
+        .. versionchanged:: 0.7
+           You can pass a different ident function to the local manager that
+           will then be propagated to all the locals passed to the
+           constructor.
+        """
+        warnings.warn(
+            "'get_ident' is deprecated and will be removed in Werkzeug 2.1.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.ident_func()
+
+    def cleanup(self) -> None:
+        """Manually clean up the data in the locals for this context.  Call
+        this at the end of the request or use `make_middleware()`.
+        """
+        for local in self.locals:
+            release_local(local)
+
+    def make_middleware(self, app: "WSGIApplication") -> "WSGIApplication":
+        """Wrap a WSGI application so that cleaning up happens after
+        request end.
+        """
+
+        def application(
+            environ: "WSGIEnvironment", start_response: "StartResponse"
+        ) -> t.Iterable[bytes]:
+            return ClosingIterator(app(environ, start_response), self.cleanup)
+
+        return application
+
+    def middleware(self, func: "WSGIApplication") -> "WSGIApplication":
+        """Like `make_middleware` but for decorating functions.
+
+        Example usage::
+
+            @manager.middleware
+            def application(environ, start_response):
+                ...
+
+        The difference to `make_middleware` is that the function passed
+        will have all the arguments copied from the inner application
+        (name, docstring, module).
+        """
+        return update_wrapper(self.make_middleware(func), func)
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} storages: {len(self.locals)}>"
+
+
+class _ProxyLookup:
+    """Descriptor that handles proxied attribute lookup for
+    :class:`LocalProxy`.
+
+    :param f: The built-in function this attribute is accessed through.
+        Instead of looking up the special method, the function call
+        is redone on the object.
+    :param fallback: Call this method if the proxy is unbound instead of
+        raising a :exc:`RuntimeError`.
+    :param class_value: Value to return when accessed from the class.
+        Used for ``__doc__`` so building docs still works.
+    """
+
+    __slots__ = ("bind_f", "fallback", "class_value", "name")
+
+    def __init__(
+        self,
+        f: t.Optional[t.Callable] = None,
+        fallback: t.Optional[t.Callable] = None,
+        class_value: t.Optional[t.Any] = None,
+    ) -> None:
+        bind_f: t.Optional[t.Callable[["LocalProxy", t.Any], t.Callable]]
+
+        if hasattr(f, "__get__"):
+            # A Python function, can be turned into a bound method.
+
+            def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable:
+                return f.__get__(obj, type(obj))  # type: ignore
+
+        elif f is not None:
+            # A C function, use partial to bind the first argument.
+
+            def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable:
+                return partial(f, obj)  # type: ignore
+
+        else:
+            # Use getattr, which will produce a bound method.
+            bind_f = None
+
+        self.bind_f = bind_f
+        self.fallback = fallback
+        self.class_value = class_value
+
+    def __set_name__(self, owner: "LocalProxy", name: str) -> None:
+        self.name = name
+
+    def __get__(self, instance: "LocalProxy", owner: t.Optional[type] = None) -> t.Any:
+        if instance is None:
+            if self.class_value is not None:
+                return self.class_value
+
+            return self
+
+        try:
+            obj = instance._get_current_object()
+        except RuntimeError:
+            if self.fallback is None:
+                raise
+
+            return self.fallback.__get__(instance, owner)  # type: ignore
+
+        if self.bind_f is not None:
+            return self.bind_f(instance, obj)
+
+        return getattr(obj, self.name)
+
+    def __repr__(self) -> str:
+        return f"proxy {self.name}"
+
+    def __call__(self, instance: "LocalProxy", *args: t.Any, **kwargs: t.Any) -> t.Any:
+        """Support calling unbound methods from the class. For example,
+        this happens with ``copy.copy``, which does
+        ``type(x).__copy__(x)``. ``type(x)`` can't be proxied, so it
+        returns the proxy type and descriptor.
+        """
+        return self.__get__(instance, type(instance))(*args, **kwargs)
+
+
+class _ProxyIOp(_ProxyLookup):
+    """Look up an augmented assignment method on a proxied object. The
+    method is wrapped to return the proxy instead of the object.
+    """
+
+    __slots__ = ()
+
+    def __init__(
+        self, f: t.Optional[t.Callable] = None, fallback: t.Optional[t.Callable] = None
+    ) -> None:
+        super().__init__(f, fallback)
+
+        def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable:
+            def i_op(self: t.Any, other: t.Any) -> "LocalProxy":
+                f(self, other)  # type: ignore
+                return instance
+
+            return i_op.__get__(obj, type(obj))  # type: ignore
+
+        self.bind_f = bind_f
+
+
+def _l_to_r_op(op: F) -> F:
+    """Swap the argument order to turn an l-op into an r-op."""
+
+    def r_op(obj: t.Any, other: t.Any) -> t.Any:
+        return op(other, obj)
+
+    return t.cast(F, r_op)
+
+
+class LocalProxy:
+    """A proxy to the object bound to a :class:`Local`. All operations
+    on the proxy are forwarded to the bound object. If no object is
+    bound, a :exc:`RuntimeError` is raised.
+
+    .. code-block:: python
+
+        from werkzeug.local import Local
+        l = Local()
+
+        # a proxy to whatever l.user is set to
+        user = l("user")
+
+        from werkzeug.local import LocalStack
+        _request_stack = LocalStack()
+
+        # a proxy to _request_stack.top
+        request = _request_stack()
+
+        # a proxy to the session attribute of the request proxy
+        session = LocalProxy(lambda: request.session)
+
+    ``__repr__`` and ``__class__`` are forwarded, so ``repr(x)`` and
+    ``isinstance(x, cls)`` will look like the proxied object. Use
+    ``issubclass(type(x), LocalProxy)`` to check if an object is a
+    proxy.
+
+    .. code-block:: python
+
+        repr(user)  # <User admin>
+        isinstance(user, User)  # True
+        issubclass(type(user), LocalProxy)  # True
+
+    :param local: The :class:`Local` or callable that provides the
+        proxied object.
+    :param name: The attribute name to look up on a :class:`Local`. Not
+        used if a callable is given.
+
+    .. versionchanged:: 2.0
+        Updated proxied attributes and methods to reflect the current
+        data model.
+
+    .. versionchanged:: 0.6.1
+        The class can be instantiated with a callable.
+    """
+
+    __slots__ = ("__local", "__name", "__wrapped__")
+
+    def __init__(
+        self,
+        local: t.Union["Local", t.Callable[[], t.Any]],
+        name: t.Optional[str] = None,
+    ) -> None:
+        object.__setattr__(self, "_LocalProxy__local", local)
+        object.__setattr__(self, "_LocalProxy__name", name)
+
+        if callable(local) and not hasattr(local, "__release_local__"):
+            # "local" is a callable that is not an instance of Local or
+            # LocalManager: mark it as a wrapped function.
+            object.__setattr__(self, "__wrapped__", local)
+
+    def _get_current_object(self) -> t.Any:
+        """Return the current object.  This is useful if you want the real
+        object behind the proxy at a time for performance reasons or because
+        you want to pass the object into a different context.
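+
+        A minimal sketch (``request`` is an assumed proxy name):
+
+        .. code-block:: python
+
+            # hand the real object, not the proxy, to another thread
+            real_request = request._get_current_object()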
+        """
+        if not hasattr(self.__local, "__release_local__"):  # type: ignore
+            return self.__local()  # type: ignore
+
+        try:
+            return getattr(self.__local, self.__name)  # type: ignore
+        except AttributeError:
+            raise RuntimeError(f"no object bound to {self.__name}")  # type: ignore
+
+    __doc__ = _ProxyLookup(  # type: ignore
+        class_value=__doc__, fallback=lambda self: type(self).__doc__
+    )
+    # __del__ should only delete the proxy
+    __repr__ = _ProxyLookup(  # type: ignore
+        repr, fallback=lambda self: f"<{type(self).__name__} unbound>"
+    )
+    __str__ = _ProxyLookup(str)  # type: ignore
+    __bytes__ = _ProxyLookup(bytes)
+    __format__ = _ProxyLookup()  # type: ignore
+    __lt__ = _ProxyLookup(operator.lt)
+    __le__ = _ProxyLookup(operator.le)
+    __eq__ = _ProxyLookup(operator.eq)  # type: ignore
+    __ne__ = _ProxyLookup(operator.ne)  # type: ignore
+    __gt__ = _ProxyLookup(operator.gt)
+    __ge__ = _ProxyLookup(operator.ge)
+    __hash__ = _ProxyLookup(hash)  # type: ignore
+    __bool__ = _ProxyLookup(bool, fallback=lambda self: False)
+    __getattr__ = _ProxyLookup(getattr)
+    # __getattribute__ triggered through __getattr__
+    __setattr__ = _ProxyLookup(setattr)  # type: ignore
+    __delattr__ = _ProxyLookup(delattr)  # type: ignore
+    __dir__ = _ProxyLookup(dir, fallback=lambda self: [])  # type: ignore
+    # __get__ (proxying descriptor not supported)
+    # __set__ (descriptor)
+    # __delete__ (descriptor)
+    # __set_name__ (descriptor)
+    # __objclass__ (descriptor)
+    # __slots__ used by proxy itself
+    # __dict__ (__getattr__)
+    # __weakref__ (__getattr__)
+    # __init_subclass__ (proxying metaclass not supported)
+    # __prepare__ (metaclass)
+    __class__ = _ProxyLookup(fallback=lambda self: type(self))  # type: ignore
+    __instancecheck__ = _ProxyLookup(lambda self, other: isinstance(other, self))
+    __subclasscheck__ = _ProxyLookup(lambda self, other: issubclass(other, self))
+    # __class_getitem__ triggered through __getitem__
+    __call__ = _ProxyLookup(lambda self, *args, **kwargs: self(*args, **kwargs))
+    __len__ = _ProxyLookup(len)
+    __length_hint__ = _ProxyLookup(operator.length_hint)
+    __getitem__ = _ProxyLookup(operator.getitem)
+    __setitem__ = _ProxyLookup(operator.setitem)
+    __delitem__ = _ProxyLookup(operator.delitem)
+    # __missing__ triggered through __getitem__
+    __iter__ = _ProxyLookup(iter)
+    __next__ = _ProxyLookup(next)
+    __reversed__ = _ProxyLookup(reversed)
+    __contains__ = _ProxyLookup(operator.contains)
+    __add__ = _ProxyLookup(operator.add)
+    __sub__ = _ProxyLookup(operator.sub)
+    __mul__ = _ProxyLookup(operator.mul)
+    __matmul__ = _ProxyLookup(operator.matmul)
+    __truediv__ = _ProxyLookup(operator.truediv)
+    __floordiv__ = _ProxyLookup(operator.floordiv)
+    __mod__ = _ProxyLookup(operator.mod)
+    __divmod__ = _ProxyLookup(divmod)
+    __pow__ = _ProxyLookup(pow)
+    __lshift__ = _ProxyLookup(operator.lshift)
+    __rshift__ = _ProxyLookup(operator.rshift)
+    __and__ = _ProxyLookup(operator.and_)
+    __xor__ = _ProxyLookup(operator.xor)
+    __or__ = _ProxyLookup(operator.or_)
+    __radd__ = _ProxyLookup(_l_to_r_op(operator.add))
+    __rsub__ = _ProxyLookup(_l_to_r_op(operator.sub))
+    __rmul__ = _ProxyLookup(_l_to_r_op(operator.mul))
+    __rmatmul__ = _ProxyLookup(_l_to_r_op(operator.matmul))
+    __rtruediv__ = _ProxyLookup(_l_to_r_op(operator.truediv))
+    __rfloordiv__ = _ProxyLookup(_l_to_r_op(operator.floordiv))
+    __rmod__ = _ProxyLookup(_l_to_r_op(operator.mod))
+    __rdivmod__ = _ProxyLookup(_l_to_r_op(divmod))
+    __rpow__ = _ProxyLookup(_l_to_r_op(pow))
+    __rlshift__ = _ProxyLookup(_l_to_r_op(operator.lshift))
+    __rrshift__ = _ProxyLookup(_l_to_r_op(operator.rshift))
+    __rand__ = _ProxyLookup(_l_to_r_op(operator.and_))
+    __rxor__ = _ProxyLookup(_l_to_r_op(operator.xor))
+    __ror__ = _ProxyLookup(_l_to_r_op(operator.or_))
+    __iadd__ = _ProxyIOp(operator.iadd)
+    __isub__ = _ProxyIOp(operator.isub)
+    __imul__ = _ProxyIOp(operator.imul)
+    __imatmul__ = _ProxyIOp(operator.imatmul)
+    __itruediv__ = _ProxyIOp(operator.itruediv)
+    __ifloordiv__ = _ProxyIOp(operator.ifloordiv)
+    __imod__ = _ProxyIOp(operator.imod)
+    __ipow__ = _ProxyIOp(operator.ipow)
+    __ilshift__ = _ProxyIOp(operator.ilshift)
+    __irshift__ = _ProxyIOp(operator.irshift)
+    __iand__ = _ProxyIOp(operator.iand)
+    __ixor__ = _ProxyIOp(operator.ixor)
+    __ior__ = _ProxyIOp(operator.ior)
+    __neg__ = _ProxyLookup(operator.neg)
+    __pos__ = _ProxyLookup(operator.pos)
+    __abs__ = _ProxyLookup(abs)
+    __invert__ = _ProxyLookup(operator.invert)
+    __complex__ = _ProxyLookup(complex)
+    __int__ = _ProxyLookup(int)
+    __float__ = _ProxyLookup(float)
+    __index__ = _ProxyLookup(operator.index)
+    __round__ = _ProxyLookup(round)
+    __trunc__ = _ProxyLookup(math.trunc)
+    __floor__ = _ProxyLookup(math.floor)
+    __ceil__ = _ProxyLookup(math.ceil)
+    __enter__ = _ProxyLookup()
+    __exit__ = _ProxyLookup()
+    __await__ = _ProxyLookup()
+    __aiter__ = _ProxyLookup()
+    __anext__ = _ProxyLookup()
+    __aenter__ = _ProxyLookup()
+    __aexit__ = _ProxyLookup()
+    __copy__ = _ProxyLookup(copy.copy)
+    __deepcopy__ = _ProxyLookup(copy.deepcopy)
+    # __getnewargs_ex__ (pickle through proxy not supported)
+    # __getnewargs__ (pickle)
+    # __getstate__ (pickle)
+    # __setstate__ (pickle)
+    # __reduce__ (pickle)
+    # __reduce_ex__ (pickle)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/middleware/__init__.py b/venv/lib/python3.7/site-packages/werkzeug/middleware/__init__.py
new file mode 100644
index 00000000..6ddcf7f5
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/middleware/__init__.py
@@ -0,0 +1,22 @@
+"""
+Middleware
+==========
+
+A WSGI middleware is a WSGI application that wraps another application
+in order to observe or change its behavior. Werkzeug provides some
+middleware for common use cases.
+
+.. toctree::
+    :maxdepth: 1
+
+    proxy_fix
+    shared_data
+    dispatcher
+    http_proxy
+    lint
+    profiler
+
+The :doc:`interactive debugger </debug>` is also a middleware that can
+be applied manually, although it is typically used automatically with
+the :doc:`development server </serving>`.
+"""
diff --git a/venv/lib/python3.7/site-packages/werkzeug/middleware/dispatcher.py b/venv/lib/python3.7/site-packages/werkzeug/middleware/dispatcher.py
new file mode 100644
index 00000000..ace1c750
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/middleware/dispatcher.py
@@ -0,0 +1,78 @@
+"""
+Application Dispatcher
+======================
+
+This middleware creates a single WSGI application that dispatches to
+multiple other WSGI applications mounted at different URL paths.
+
+A common example is writing a Single Page Application, where you have a
+backend API and a frontend written in JavaScript that does the routing
+in the browser rather than requesting different pages from the server.
+The frontend is a single HTML and JS file that should be served for any
+path besides "/api".
+
+This example dispatches to an API app under "/api", an admin app
+under "/admin", and an app that serves frontend files for all other
+requests::
+
+    app = DispatcherMiddleware(serve_frontend, {
+        '/api': api_app,
+        '/admin': admin_app,
+    })
+
+In production, you might instead handle this at the HTTP server level,
+serving files or proxying to application servers based on location. The
+API and admin apps would each be deployed with a separate WSGI server,
+and the static files would be served directly by the HTTP server.
+
+.. autoclass:: DispatcherMiddleware
+
+:copyright: 2007 Pallets
+:license: BSD-3-Clause
+"""
+import typing as t
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+class DispatcherMiddleware:
+    """Combine multiple applications as a single WSGI application.
+    Requests are dispatched to an application based on the path prefix
+    it is mounted under.
+
+    :param app: The WSGI application to dispatch to if the request
+        doesn't match a mounted path.
+    :param mounts: Maps path prefixes to applications for dispatching.
+    """
+
+    def __init__(
+        self,
+        app: "WSGIApplication",
+        mounts: t.Optional[t.Dict[str, "WSGIApplication"]] = None,
+    ) -> None:
+        self.app = app
+        self.mounts = mounts or {}
+
+    def __call__(
+        self, environ: "WSGIEnvironment", start_response: "StartResponse"
+    ) -> t.Iterable[bytes]:
+        script = environ.get("PATH_INFO", "")
+        path_info = ""
+
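+        # Walk the path from longest prefix to shortest, peeling the
+        # last segment into PATH_INFO until a mounted app matches.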
+        while "/" in script:
+            if script in self.mounts:
+                app = self.mounts[script]
+                break
+
+            script, last_item = script.rsplit("/", 1)
+            path_info = f"/{last_item}{path_info}"
+        else:
+            app = self.mounts.get(script, self.app)
+
+        original_script_name = environ.get("SCRIPT_NAME", "")
+        environ["SCRIPT_NAME"] = original_script_name + script
+        environ["PATH_INFO"] = path_info
+        return app(environ, start_response)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/middleware/http_proxy.py b/venv/lib/python3.7/site-packages/werkzeug/middleware/http_proxy.py
new file mode 100644
index 00000000..1cde458d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/middleware/http_proxy.py
@@ -0,0 +1,230 @@
+"""
+Basic HTTP Proxy
+================
+
+.. autoclass:: ProxyMiddleware
+
+:copyright: 2007 Pallets
+:license: BSD-3-Clause
+"""
+import typing as t
+from http import client
+
+from ..datastructures import EnvironHeaders
+from ..http import is_hop_by_hop_header
+from ..urls import url_parse
+from ..urls import url_quote
+from ..wsgi import get_input_stream
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+class ProxyMiddleware:
+    """Proxy requests under a path to an external server, routing other
+    requests to the app.
+
+    This middleware can only proxy HTTP requests, as HTTP is the only
+    protocol handled by the WSGI server. Other protocols, such as
+    WebSocket requests, cannot be proxied at this layer. This should
+    only be used for development, in production a real proxy server
+    should be used.
+
+    The middleware takes a dict mapping a path prefix to a dict
+    describing the host to be proxied to::
+
+        app = ProxyMiddleware(app, {
+            "/static/": {
+                "target": "http://127.0.0.1:5001/",
+            }
+        })
+
+    Each host has the following options:
+
+    ``target``:
+        The target URL to dispatch to. This is required.
+    ``remove_prefix``:
+        Whether to remove the prefix from the URL before dispatching it
+        to the target. The default is ``False``.
+    ``host``:
+        ``"<auto>"`` (default):
+            The host header is automatically rewritten to the URL of the
+            target.
+        ``None``:
+            The host header is unmodified from the client request.
+        Any other value:
+            The host header is overwritten with the value.
+    ``headers``:
+        A dictionary of headers to be sent with the request to the
+        target. The default is ``{}``.
+    ``ssl_context``:
+        A :class:`ssl.SSLContext` defining how to verify requests if the
+        target is HTTPS. The default is ``None``.
+
+    In the example above, everything under ``"/static/"`` is proxied to
+    the server on port 5001. The host header is rewritten to the target.
+    Setting ``remove_prefix`` would also strip the ``"/static/"`` prefix
+    from the URLs before they are dispatched.
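+
+    A fuller configuration sketch (all values illustrative):
+
+    .. code-block:: python
+
+        app = ProxyMiddleware(app, {
+            "/static/": {
+                "target": "http://127.0.0.1:5001/",
+                "remove_prefix": True,
+                "headers": {"X-Forwarded-Prefix": "/static"},
+            }
+        })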
+
+    :param app: The WSGI application to wrap.
+    :param targets: Proxy target configurations. See description above.
+    :param chunk_size: Size of chunks to read from input stream and
+        write to target.
+    :param timeout: Seconds before an operation to a target fails.
+
+    .. versionadded:: 0.14
+    """
+
+    def __init__(
+        self,
+        app: "WSGIApplication",
+        targets: t.Mapping[str, t.Dict[str, t.Any]],
+        chunk_size: int = 2 << 13,
+        timeout: int = 10,
+    ) -> None:
+        def _set_defaults(opts: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
+            opts.setdefault("remove_prefix", False)
+            opts.setdefault("host", "<auto>")
+            opts.setdefault("headers", {})
+            opts.setdefault("ssl_context", None)
+            return opts
+
+        self.app = app
+        self.targets = {
+            f"/{k.strip('/')}/": _set_defaults(v) for k, v in targets.items()
+        }
+        self.chunk_size = chunk_size
+        self.timeout = timeout
+
+    def proxy_to(
+        self, opts: t.Dict[str, t.Any], path: str, prefix: str
+    ) -> "WSGIApplication":
+        target = url_parse(opts["target"])
+        host = t.cast(str, target.ascii_host)
+
+        def application(
+            environ: "WSGIEnvironment", start_response: "StartResponse"
+        ) -> t.Iterable[bytes]:
+            headers = list(EnvironHeaders(environ).items())
+            headers[:] = [
+                (k, v)
+                for k, v in headers
+                if not is_hop_by_hop_header(k)
+                and k.lower() not in ("content-length", "host")
+            ]
+            headers.append(("Connection", "close"))
+
+            if opts["host"] == "<auto>":
+                headers.append(("Host", host))
+            elif opts["host"] is None:
+                headers.append(("Host", environ["HTTP_HOST"]))
+            else:
+                headers.append(("Host", opts["host"]))
+
+            headers.extend(opts["headers"].items())
+            remote_path = path
+
+            if opts["remove_prefix"]:
+                remote_path = remote_path[len(prefix) :].lstrip("/")
+                remote_path = f"{target.path.rstrip('/')}/{remote_path}"
+
+            content_length = environ.get("CONTENT_LENGTH")
+            chunked = False
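+            # A non-empty CONTENT_LENGTH is forwarded as-is. An empty
+            # string means a body of unknown length, so it is re-sent
+            # with chunked transfer encoding. None means no body.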
+
+            if content_length not in ("", None):
+                headers.append(("Content-Length", content_length))  # type: ignore
+            elif content_length is not None:
+                headers.append(("Transfer-Encoding", "chunked"))
+                chunked = True
+
+            try:
+                if target.scheme == "http":
+                    con = client.HTTPConnection(
+                        host, target.port or 80, timeout=self.timeout
+                    )
+                elif target.scheme == "https":
+                    con = client.HTTPSConnection(
+                        host,
+                        target.port or 443,
+                        timeout=self.timeout,
+                        context=opts["ssl_context"],
+                    )
+                else:
+                    raise RuntimeError(
+                        "Target scheme must be 'http' or 'https', got"
+                        f" {target.scheme!r}."
+                    )
+
+                con.connect()
+                remote_url = url_quote(remote_path)
+                querystring = environ["QUERY_STRING"]
+
+                if querystring:
+                    remote_url = f"{remote_url}?{querystring}"
+
+                con.putrequest(environ["REQUEST_METHOD"], remote_url, skip_host=True)
+
+                for k, v in headers:
+                    if k.lower() == "connection":
+                        v = "close"
+
+                    con.putheader(k, v)
+
+                con.endheaders()
+                stream = get_input_stream(environ)
+
+                while True:
+                    data = stream.read(self.chunk_size)
+
+                    if not data:
+                        break
+
+                    if chunked:
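+                        # Frame each chunk as "<hex size>\r\n<bytes>\r\n",
+                        # per the HTTP/1.1 chunked transfer coding.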
+                        con.send(b"%x\r\n%s\r\n" % (len(data), data))
+                    else:
+                        con.send(data)
+
+                resp = con.getresponse()
+            except OSError:
+                from ..exceptions import BadGateway
+
+                return BadGateway()(environ, start_response)
+
+            start_response(
+                f"{resp.status} {resp.reason}",
+                [
+                    (k.title(), v)
+                    for k, v in resp.getheaders()
+                    if not is_hop_by_hop_header(k)
+                ],
+            )
+
+            def read() -> t.Iterator[bytes]:
+                while True:
+                    try:
+                        data = resp.read(self.chunk_size)
+                    except OSError:
+                        break
+
+                    if not data:
+                        break
+
+                    yield data
+
+            return read()
+
+        return application
+
+    def __call__(
+        self, environ: "WSGIEnvironment", start_response: "StartResponse"
+    ) -> t.Iterable[bytes]:
+        path = environ["PATH_INFO"]
+        app = self.app
+
+        for prefix, opts in self.targets.items():
+            if path.startswith(prefix):
+                app = self.proxy_to(opts, path, prefix)
+                break
+
+        return app(environ, start_response)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/middleware/lint.py b/venv/lib/python3.7/site-packages/werkzeug/middleware/lint.py
new file mode 100644
index 00000000..80c423dd
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/middleware/lint.py
@@ -0,0 +1,420 @@
+"""
+WSGI Protocol Linter
+====================
+
+This module provides a middleware that performs sanity checks on the
+behavior of the WSGI server and application. It checks that the
+:pep:`3333` WSGI spec is properly implemented. It also warns on some
+common HTTP errors such as non-empty responses for 304 status codes.
+
+.. autoclass:: LintMiddleware
+
+:copyright: 2007 Pallets
+:license: BSD-3-Clause
+"""
+import typing as t
+from types import TracebackType
+from urllib.parse import urlparse
+from warnings import warn
+
+from ..datastructures import Headers
+from ..http import is_entity_header
+from ..wsgi import FileWrapper
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+class WSGIWarning(Warning):
+    """Warning class for WSGI warnings."""
+
+
+class HTTPWarning(Warning):
+    """Warning class for HTTP warnings."""
+
+
+def check_type(context: str, obj: object, need: t.Type = str) -> None:
+    if type(obj) is not need:
+        warn(
+            f"{context!r} requires {need.__name__!r}, got {type(obj).__name__!r}.",
+            WSGIWarning,
+            stacklevel=3,
+        )
+
+
+class InputStream:
+    def __init__(self, stream: t.BinaryIO) -> None:
+        self._stream = stream
+
+    def read(self, *args: t.Any) -> bytes:
+        if len(args) == 0:
+            warn(
+                "WSGI does not guarantee an EOF marker on the input stream, thus making"
+                " calls to 'wsgi.input.read()' unsafe. Conforming servers may never"
+                " return from this call.",
+                WSGIWarning,
+                stacklevel=2,
+            )
+        elif len(args) != 1:
+            warn(
+                "Too many parameters passed to 'wsgi.input.read()'.",
+                WSGIWarning,
+                stacklevel=2,
+            )
+        return self._stream.read(*args)
+
+    def readline(self, *args: t.Any) -> bytes:
+        if len(args) == 0:
+            warn(
+                "Calls to 'wsgi.input.readline()' without arguments are unsafe. Use"
+                " 'wsgi.input.read()' instead.",
+                WSGIWarning,
+                stacklevel=2,
+            )
+        elif len(args) == 1:
+            warn(
+                "'wsgi.input.readline()' was called with a size hint. WSGI does not"
+                " support this, although it's available on all major servers.",
+                WSGIWarning,
+                stacklevel=2,
+            )
+        else:
+            raise TypeError("Too many arguments passed to 'wsgi.input.readline()'.")
+        return self._stream.readline(*args)
+
+    def __iter__(self) -> t.Iterator[bytes]:
+        try:
+            return iter(self._stream)
+        except TypeError:
+            warn("'wsgi.input' is not iterable.", WSGIWarning, stacklevel=2)
+            return iter(())
+
+    def close(self) -> None:
+        warn("The application closed the input stream!", WSGIWarning, stacklevel=2)
+        self._stream.close()
+
+
+class ErrorStream:
+    def __init__(self, stream: t.TextIO) -> None:
+        self._stream = stream
+
+    def write(self, s: str) -> None:
+        check_type("wsgi.error.write()", s, str)
+        self._stream.write(s)
+
+    def flush(self) -> None:
+        self._stream.flush()
+
+    def writelines(self, seq: t.Iterable[str]) -> None:
+        for line in seq:
+            self.write(line)
+
+    def close(self) -> None:
+        warn("The application closed the error stream!", WSGIWarning, stacklevel=2)
+        self._stream.close()
+
+
+class GuardedWrite:
+    def __init__(self, write: t.Callable[[bytes], None], chunks: t.List[int]) -> None:
+        self._write = write
+        self._chunks = chunks
+
+    def __call__(self, s: bytes) -> None:
+        check_type("write()", s, bytes)
+        self._write(s)
+        self._chunks.append(len(s))
+
+
+class GuardedIterator:
+    def __init__(
+        self,
+        iterator: t.Iterable[bytes],
+        headers_set: t.Tuple[int, Headers],
+        chunks: t.List[int],
+    ) -> None:
+        self._iterator = iterator
+        self._next = iter(iterator).__next__
+        self.closed = False
+        self.headers_set = headers_set
+        self.chunks = chunks
+
+    def __iter__(self) -> "GuardedIterator":
+        return self
+
+    def __next__(self) -> bytes:
+        if self.closed:
+            warn("Iterated over closed 'app_iter'.", WSGIWarning, stacklevel=2)
+
+        rv = self._next()
+
+        if not self.headers_set:
+            warn(
+                "The application returned before it started the response.",
+                WSGIWarning,
+                stacklevel=2,
+            )
+
+        check_type("application iterator items", rv, bytes)
+        self.chunks.append(len(rv))
+        return rv
+
+    def close(self) -> None:
+        self.closed = True
+
+        if hasattr(self._iterator, "close"):
+            self._iterator.close()  # type: ignore
+
+        if self.headers_set:
+            status_code, headers = self.headers_set
+            bytes_sent = sum(self.chunks)
+            content_length = headers.get("content-length", type=int)
+
+            if status_code == 304:
+                for key, _value in headers:
+                    key = key.lower()
+                    if key not in ("expires", "content-location") and is_entity_header(
+                        key
+                    ):
+                        warn(
+                            f"Entity header {key!r} found in 304 response.", HTTPWarning
+                        )
+                if bytes_sent:
+                    warn("304 responses must not have a body.", HTTPWarning)
+            elif 100 <= status_code < 200 or status_code == 204:
+                if content_length != 0:
+                    warn(
+                        f"{status_code} responses must have an empty content length.",
+                        HTTPWarning,
+                    )
+                if bytes_sent:
+                    warn(f"{status_code} responses must not have a body.", HTTPWarning)
+            elif content_length is not None and content_length != bytes_sent:
+                warn(
+                    "Content-Length and the number of bytes sent to the"
+                    " client do not match.",
+                    WSGIWarning,
+                )
+
+    def __del__(self) -> None:
+        if not self.closed:
+            try:
+                warn(
+                    "Iterator was garbage collected before it was closed.", WSGIWarning
+                )
+            except Exception:
+                pass
+
+
+class LintMiddleware:
+    """Warns about common errors in the WSGI and HTTP behavior of the
+    server and wrapped application. Some of the issues it checks are:
+
+    -   invalid status codes
+    -   non-bytes sent to the WSGI server
+    -   strings returned from the WSGI application
+    -   non-empty conditional responses
+    -   unquoted etags
+    -   relative URLs in the Location header
+    -   unsafe calls to wsgi.input
+    -   unclosed iterators
+
+    Error information is emitted using the :mod:`warnings` module.
+
+    :param app: The WSGI application to wrap.
+
+    .. code-block:: python
+
+        from werkzeug.middleware.lint import LintMiddleware
+        app = LintMiddleware(app)
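+
+        # to escalate lint output, treat the warnings as errors
+        # (a sketch using the standard warnings filter)
+        import warnings
+        from werkzeug.middleware.lint import WSGIWarning
+        warnings.simplefilter("error", WSGIWarning)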
+    """
+
+    def __init__(self, app: "WSGIApplication") -> None:
+        self.app = app
+
+    def check_environ(self, environ: "WSGIEnvironment") -> None:
+        if type(environ) is not dict:
+            warn(
+                "WSGI environment is not a standard Python dict.",
+                WSGIWarning,
+                stacklevel=4,
+            )
+        for key in (
+            "REQUEST_METHOD",
+            "SERVER_NAME",
+            "SERVER_PORT",
+            "wsgi.version",
+            "wsgi.input",
+            "wsgi.errors",
+            "wsgi.multithread",
+            "wsgi.multiprocess",
+            "wsgi.run_once",
+        ):
+            if key not in environ:
+                warn(
+                    f"Required environment key {key!r} not found",
+                    WSGIWarning,
+                    stacklevel=3,
+                )
+        if environ["wsgi.version"] != (1, 0):
+            warn("Environ is not a WSGI 1.0 environ.", WSGIWarning, stacklevel=3)
+
+        script_name = environ.get("SCRIPT_NAME", "")
+        path_info = environ.get("PATH_INFO", "")
+
+        if script_name and script_name[0] != "/":
+            warn(
+                f"'SCRIPT_NAME' does not start with a slash: {script_name!r}",
+                WSGIWarning,
+                stacklevel=3,
+            )
+
+        if path_info and path_info[0] != "/":
+            warn(
+                f"'PATH_INFO' does not start with a slash: {path_info!r}",
+                WSGIWarning,
+                stacklevel=3,
+            )
+
+    def check_start_response(
+        self,
+        status: str,
+        headers: t.List[t.Tuple[str, str]],
+        exc_info: t.Optional[
+            t.Tuple[t.Type[BaseException], BaseException, TracebackType]
+        ],
+    ) -> t.Tuple[int, Headers]:
+        check_type("status", status, str)
+        status_code_str = status.split(None, 1)[0]
+
+        if len(status_code_str) != 3 or not status_code_str.isdigit():
+            warn("Status code must be three digits.", WSGIWarning, stacklevel=3)
+
+        if len(status) < 4 or status[3] != " ":
+            warn(
+                f"Invalid value for status {status!r}. Valid status strings are three"
+                " digits, a space and a status explanation.",
+                WSGIWarning,
+                stacklevel=3,
+            )
+
+        status_code = int(status_code_str)
+
+        if status_code < 100:
+            warn("Status code < 100 detected.", WSGIWarning, stacklevel=3)
+
+        if type(headers) is not list:
+            warn("Header list is not a list.", WSGIWarning, stacklevel=3)
+
+        for item in headers:
+            if type(item) is not tuple or len(item) != 2:
+                warn("Header items must be 2-item tuples.", WSGIWarning, stacklevel=3)
+            name, value = item
+            if type(name) is not str or type(value) is not str:
+                warn(
+                    "Header keys and values must be strings.", WSGIWarning, stacklevel=3
+                )
+            if name.lower() == "status":
+                warn(
+                    "The status header is not supported due to"
+                    " conflicts with the CGI spec.",
+                    WSGIWarning,
+                    stacklevel=3,
+                )
+
+        if exc_info is not None and not isinstance(exc_info, tuple):
+            warn("Invalid value for exc_info.", WSGIWarning, stacklevel=3)
+
+        headers = Headers(headers)
+        self.check_headers(headers)
+
+        return status_code, headers
+
+    def check_headers(self, headers: Headers) -> None:
+        etag = headers.get("etag")
+
+        if etag is not None:
+            if etag.startswith(("W/", "w/")):
+                if etag.startswith("w/"):
+                    warn(
+                        "Weak etag indicator should be upper case.",
+                        HTTPWarning,
+                        stacklevel=4,
+                    )
+
+                etag = etag[2:]
+
+            if not (etag[:1] == etag[-1:] == '"'):
+                warn("Unquoted etag emitted.", HTTPWarning, stacklevel=4)
+
+        location = headers.get("location")
+
+        if location is not None:
+            if not urlparse(location).netloc:
+                warn(
+                    "Absolute URLs required for location header.",
+                    HTTPWarning,
+                    stacklevel=4,
+                )
+
+    def check_iterator(self, app_iter: t.Iterable[bytes]) -> None:
+        if isinstance(app_iter, bytes):
+            warn(
+                "The application returned a bytestring. The response will send one"
+                " character at a time to the client, which will kill performance."
+                " Return a list or iterable instead.",
+                WSGIWarning,
+                stacklevel=3,
+            )
+
+    def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Iterable[bytes]:
+        if len(args) != 2:
+            warn("A WSGI app takes two arguments.", WSGIWarning, stacklevel=2)
+
+        if kwargs:
+            warn(
+                "A WSGI app does not take keyword arguments.", WSGIWarning, stacklevel=2
+            )
+
+        environ: "WSGIEnvironment" = args[0]
+        start_response: "StartResponse" = args[1]
+
+        self.check_environ(environ)
+        environ["wsgi.input"] = InputStream(environ["wsgi.input"])
+        environ["wsgi.errors"] = ErrorStream(environ["wsgi.errors"])
+
+        # Hook our own file wrapper in so that applications will always
+        # iterate to the end and we can check the content length.
+        environ["wsgi.file_wrapper"] = FileWrapper
+
+        headers_set: t.List[t.Any] = []
+        chunks: t.List[int] = []
+
+        def checking_start_response(
+            *args: t.Any, **kwargs: t.Any
+        ) -> t.Callable[[bytes], None]:
+            if len(args) not in {2, 3}:
+                warn(
+                    f"Invalid number of arguments: {len(args)}, expected 2 or 3.",
+                    WSGIWarning,
+                    stacklevel=2,
+                )
+
+            if kwargs:
+                warn("'start_response' does not take keyword arguments.", WSGIWarning)
+
+            status: str = args[0]
+            headers: t.List[t.Tuple[str, str]] = args[1]
+            exc_info: t.Optional[
+                t.Tuple[t.Type[BaseException], BaseException, TracebackType]
+            ] = (args[2] if len(args) == 3 else None)
+
+            headers_set[:] = self.check_start_response(status, headers, exc_info)
+            return GuardedWrite(start_response(status, headers, exc_info), chunks)
+
+        app_iter = self.app(environ, t.cast("StartResponse", checking_start_response))
+        self.check_iterator(app_iter)
+        return GuardedIterator(
+            app_iter, t.cast(t.Tuple[int, Headers], headers_set), chunks
+        )
diff --git a/venv/lib/python3.7/site-packages/werkzeug/middleware/profiler.py b/venv/lib/python3.7/site-packages/werkzeug/middleware/profiler.py
new file mode 100644
index 00000000..0992f8f1
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/middleware/profiler.py
@@ -0,0 +1,139 @@
+"""
+Application Profiler
+====================
+
+This module provides a middleware that profiles each request with the
+:mod:`cProfile` module. This can help identify bottlenecks in your code
+that may be slowing down your application.
+
+.. autoclass:: ProfilerMiddleware
+
+:copyright: 2007 Pallets
+:license: BSD-3-Clause
+"""
+import os.path
+import sys
+import time
+import typing as t
+from pstats import Stats
+
+try:
+    from cProfile import Profile
+except ImportError:
+    from profile import Profile  # type: ignore
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+class ProfilerMiddleware:
+    """Wrap a WSGI application and profile the execution of each
+    request. Responses are buffered so that timings are more exact.
+
+    If ``stream`` is given, :class:`pstats.Stats` are written to it
+    after each request. If ``profile_dir`` is given, :mod:`cProfile`
+    data files are saved to that directory, one file per request.
+
+    The filename can be customized by passing ``filename_format``. If
+    it is a string, it will be formatted using :meth:`str.format` with
+    the following fields available:
+
+    -   ``{method}`` - The request method; GET, POST, etc.
+    -   ``{path}`` - The request path, or ``'root'`` if the path is empty.
+    -   ``{elapsed}`` - The elapsed time of the request.
+    -   ``{time}`` - The time of the request.
+
+    If it is a callable, it will be called with the WSGI ``environ``
+    dict and should return a filename.
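+
+    For example, a callable might derive the name from the request (a
+    sketch; ``profile_name`` is a hypothetical helper):
+
+    .. code-block:: python
+
+        def profile_name(environ):
+            return f"{environ['REQUEST_METHOD']}.prof"
+
+        app = ProfilerMiddleware(app, profile_dir=".",
+                                 filename_format=profile_name)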
+
+    :param app: The WSGI application to wrap.
+    :param stream: Write stats to this stream. Disable with ``None``.
+    :param sort_by: A tuple of columns to sort stats by. See
+        :meth:`pstats.Stats.sort_stats`.
+    :param restrictions: A tuple of restrictions to filter stats by. See
+        :meth:`pstats.Stats.print_stats`.
+    :param profile_dir: Save profile data files to this directory.
+    :param filename_format: Format string for profile data file names,
+        or a callable returning a name. See explanation above.
+
+    .. code-block:: python
+
+        from werkzeug.middleware.profiler import ProfilerMiddleware
+        app = ProfilerMiddleware(app)
+
+    .. versionchanged:: 0.15
+        Stats are written even if ``profile_dir`` is given, and can be
+        disabled by passing ``stream=None``.
+
+    .. versionadded:: 0.15
+        Added ``filename_format``.
+
+    .. versionadded:: 0.9
+        Added ``restrictions`` and ``profile_dir``.
+    """
+
+    def __init__(
+        self,
+        app: "WSGIApplication",
+        stream: t.TextIO = sys.stdout,
+        sort_by: t.Iterable[str] = ("time", "calls"),
+        restrictions: t.Iterable[t.Union[str, int, float]] = (),
+        profile_dir: t.Optional[str] = None,
+        filename_format: t.Union[
+            str, t.Callable
+        ] = "{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof",
+    ) -> None:
+        self._app = app
+        self._stream = stream
+        self._sort_by = sort_by
+        self._restrictions = restrictions
+        self._profile_dir = profile_dir
+        self._filename_format = filename_format
+
+    def __call__(
+        self, environ: "WSGIEnvironment", start_response: "StartResponse"
+    ) -> t.Iterable[bytes]:
+        response_body: t.List[bytes] = []
+
+        def catching_start_response(status, headers, exc_info=None):  # type: ignore
+            start_response(status, headers, exc_info)
+            return response_body.append
+
+        def runapp() -> None:
+            app_iter = self._app(
+                environ, t.cast("StartResponse", catching_start_response)
+            )
+            response_body.extend(app_iter)
+
+            if hasattr(app_iter, "close"):
+                app_iter.close()  # type: ignore
+
+        profile = Profile()
+        start = time.time()
+        profile.runcall(runapp)
+        body = b"".join(response_body)
+        elapsed = time.time() - start
+
+        if self._profile_dir is not None:
+            if callable(self._filename_format):
+                filename = self._filename_format(environ)
+            else:
+                filename = self._filename_format.format(
+                    method=environ["REQUEST_METHOD"],
+                    path=environ["PATH_INFO"].strip("/").replace("/", ".") or "root",
+                    elapsed=elapsed * 1000.0,
+                    time=time.time(),
+                )
+            filename = os.path.join(self._profile_dir, filename)
+            profile.dump_stats(filename)
+
+        if self._stream is not None:
+            stats = Stats(profile, stream=self._stream)
+            stats.sort_stats(*self._sort_by)
+            print("-" * 80, file=self._stream)
+            path_info = environ.get("PATH_INFO", "")
+            print(f"PATH: {path_info!r}", file=self._stream)
+            stats.print_stats(*self._restrictions)
+            print(f"{'-' * 80}\n", file=self._stream)
+
+        return [body]
diff --git a/venv/lib/python3.7/site-packages/werkzeug/middleware/proxy_fix.py b/venv/lib/python3.7/site-packages/werkzeug/middleware/proxy_fix.py
new file mode 100644
index 00000000..e90b1b34
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/middleware/proxy_fix.py
@@ -0,0 +1,187 @@
+"""
+X-Forwarded-For Proxy Fix
+=========================
+
+This module provides a middleware that adjusts the WSGI environ based on
+``X-Forwarded-`` headers that proxies in front of an application may
+set.
+
+When an application is running behind a proxy server, WSGI may see the
+request as coming from that server rather than the real client. Proxies
+set various headers to track where the request actually came from.
+
+This middleware should only be used if the application is actually
+behind such a proxy, and should be configured with the number of proxies
+that are chained in front of it. Not all proxies set all the headers.
+Since incoming headers can be faked, you must set how many proxies are
+setting each header so the middleware knows what to trust.
+
+.. autoclass:: ProxyFix
+
+:copyright: 2007 Pallets
+:license: BSD-3-Clause
+"""
+import typing as t
+
+from ..http import parse_list_header
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+class ProxyFix:
+    """Adjust the WSGI environ based on ``X-Forwarded-`` that proxies in
+    front of the application may set.
+
+    -   ``X-Forwarded-For`` sets ``REMOTE_ADDR``.
+    -   ``X-Forwarded-Proto`` sets ``wsgi.url_scheme``.
+    -   ``X-Forwarded-Host`` sets ``HTTP_HOST``, ``SERVER_NAME``, and
+        ``SERVER_PORT``.
+    -   ``X-Forwarded-Port`` sets ``HTTP_HOST`` and ``SERVER_PORT``.
+    -   ``X-Forwarded-Prefix`` sets ``SCRIPT_NAME``.
+
+    You must tell the middleware how many proxies set each header so it
+    knows what values to trust. It is a security issue to trust values
+    that came from the client rather than a proxy.
+
+    The original values of the headers are stored in the WSGI
+    environ as ``werkzeug.proxy_fix.orig``, a dict.
+
+    :param app: The WSGI application to wrap.
+    :param x_for: Number of values to trust for ``X-Forwarded-For``.
+    :param x_proto: Number of values to trust for ``X-Forwarded-Proto``.
+    :param x_host: Number of values to trust for ``X-Forwarded-Host``.
+    :param x_port: Number of values to trust for ``X-Forwarded-Port``.
+    :param x_prefix: Number of values to trust for
+        ``X-Forwarded-Prefix``.
+
+    .. code-block:: python
+
+        from werkzeug.middleware.proxy_fix import ProxyFix
+        # App is behind one proxy that sets the -For and -Host headers.
+        app = ProxyFix(app, x_for=1, x_host=1)
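+
+        # With two chained proxies, trust one more value per header
+        # (illustrative): app = ProxyFix(app, x_for=2, x_host=2)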
+
+    .. versionchanged:: 1.0
+        Deprecated code has been removed:
+
+        *   The ``num_proxies`` argument and attribute.
+        *   The ``get_remote_addr`` method.
+        *   The environ keys ``orig_remote_addr``,
+            ``orig_wsgi_url_scheme``, and ``orig_http_host``.
+
+    .. versionchanged:: 0.15
+        All headers support multiple values. The ``num_proxies``
+        argument is deprecated. Each header is configured with a
+        separate number of trusted proxies.
+
+    .. versionchanged:: 0.15
+        Original WSGI environ values are stored in the
+        ``werkzeug.proxy_fix.orig`` dict. ``orig_remote_addr``,
+        ``orig_wsgi_url_scheme``, and ``orig_http_host`` are deprecated
+        and will be removed in 1.0.
+
+    .. versionchanged:: 0.15
+        Support ``X-Forwarded-Port`` and ``X-Forwarded-Prefix``.
+
+    .. versionchanged:: 0.15
+        ``X-Forwarded-Host`` and ``X-Forwarded-Port`` modify
+        ``SERVER_NAME`` and ``SERVER_PORT``.
+    """
+
+    def __init__(
+        self,
+        app: "WSGIApplication",
+        x_for: int = 1,
+        x_proto: int = 1,
+        x_host: int = 0,
+        x_port: int = 0,
+        x_prefix: int = 0,
+    ) -> None:
+        self.app = app
+        self.x_for = x_for
+        self.x_proto = x_proto
+        self.x_host = x_host
+        self.x_port = x_port
+        self.x_prefix = x_prefix
+
+    def _get_real_value(self, trusted: int, value: t.Optional[str]) -> t.Optional[str]:
+        """Get the real value from a list header based on the configured
+        number of trusted proxies.
+
+        :param trusted: Number of values to trust in the header.
+        :param value: Comma separated list header value to parse.
+        :return: The real value, or ``None`` if there are fewer values
+            than the number of trusted proxies.
+
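+        For example (a behavioural sketch, addresses illustrative)::
+
+            _get_real_value(1, "203.0.113.10")            # "203.0.113.10"
+            _get_real_value(2, "203.0.113.10, 10.0.0.1")  # "203.0.113.10"
+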
+        .. versionchanged:: 1.0
+            Renamed from ``_get_trusted_comma``.
+
+        .. versionadded:: 0.15
+        """
+        if not (trusted and value):
+            return None
+        values = parse_list_header(value)
+        if len(values) >= trusted:
+            return values[-trusted]
+        return None
+
+    def __call__(
+        self, environ: "WSGIEnvironment", start_response: "StartResponse"
+    ) -> t.Iterable[bytes]:
+        """Modify the WSGI environ based on the various ``Forwarded``
+        headers before calling the wrapped application. Store the
+        original environ values in ``werkzeug.proxy_fix.orig_{key}``.
+        """
+        environ_get = environ.get
+        orig_remote_addr = environ_get("REMOTE_ADDR")
+        orig_wsgi_url_scheme = environ_get("wsgi.url_scheme")
+        orig_http_host = environ_get("HTTP_HOST")
+        environ.update(
+            {
+                "werkzeug.proxy_fix.orig": {
+                    "REMOTE_ADDR": orig_remote_addr,
+                    "wsgi.url_scheme": orig_wsgi_url_scheme,
+                    "HTTP_HOST": orig_http_host,
+                    "SERVER_NAME": environ_get("SERVER_NAME"),
+                    "SERVER_PORT": environ_get("SERVER_PORT"),
+                    "SCRIPT_NAME": environ_get("SCRIPT_NAME"),
+                }
+            }
+        )
+
+        x_for = self._get_real_value(self.x_for, environ_get("HTTP_X_FORWARDED_FOR"))
+        if x_for:
+            environ["REMOTE_ADDR"] = x_for
+
+        x_proto = self._get_real_value(
+            self.x_proto, environ_get("HTTP_X_FORWARDED_PROTO")
+        )
+        if x_proto:
+            environ["wsgi.url_scheme"] = x_proto
+
+        x_host = self._get_real_value(self.x_host, environ_get("HTTP_X_FORWARDED_HOST"))
+        if x_host:
+            environ["HTTP_HOST"] = x_host
+            parts = x_host.split(":", 1)
+            environ["SERVER_NAME"] = parts[0]
+            if len(parts) == 2:
+                environ["SERVER_PORT"] = parts[1]
+
+        x_port = self._get_real_value(self.x_port, environ_get("HTTP_X_FORWARDED_PORT"))
+        if x_port:
+            host = environ.get("HTTP_HOST")
+            if host:
+                parts = host.split(":", 1)
+                host = parts[0] if len(parts) == 2 else host
+                environ["HTTP_HOST"] = f"{host}:{x_port}"
+            environ["SERVER_PORT"] = x_port
+
+        x_prefix = self._get_real_value(
+            self.x_prefix, environ_get("HTTP_X_FORWARDED_PREFIX")
+        )
+        if x_prefix:
+            environ["SCRIPT_NAME"] = x_prefix
+
+        return self.app(environ, start_response)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/middleware/shared_data.py b/venv/lib/python3.7/site-packages/werkzeug/middleware/shared_data.py
new file mode 100644
index 00000000..f11b43a0
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/middleware/shared_data.py
@@ -0,0 +1,320 @@
+"""
+Serve Shared Static Files
+=========================
+
+.. autoclass:: SharedDataMiddleware
+    :members: is_allowed
+
+:copyright: 2007 Pallets
+:license: BSD-3-Clause
+"""
+import mimetypes
+import os
+import pkgutil
+import posixpath
+import typing as t
+from datetime import datetime
+from datetime import timezone
+from io import BytesIO
+from time import time
+from zlib import adler32
+
+from ..filesystem import get_filesystem_encoding
+from ..http import http_date
+from ..http import is_resource_modified
+from ..security import safe_join
+from ..utils import get_content_type
+from ..wsgi import get_path_info
+from ..wsgi import wrap_file
+
+_TOpener = t.Callable[[], t.Tuple[t.BinaryIO, datetime, int]]
+_TLoader = t.Callable[[t.Optional[str]], t.Tuple[t.Optional[str], t.Optional[_TOpener]]]
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+class SharedDataMiddleware:
+
+    """A WSGI middleware which provides static content for development
+    environments or simple server setups. Its usage is quite simple::
+
+        import os
+        from werkzeug.middleware.shared_data import SharedDataMiddleware
+
+        app = SharedDataMiddleware(app, {
+            '/shared': os.path.join(os.path.dirname(__file__), 'shared')
+        })
+
+    The contents of the folder ``./shared`` will now be available on
+    ``http://example.com/shared/``.  This is pretty useful during development
+    because a standalone media server is not required. Files can also be
+    mounted on the root folder while the application keeps working, because
+    the shared data middleware forwards all unhandled requests to the
+    application, even if the requests fall below one of the shared folders.
+
+    If `pkg_resources` is available you can also tell the middleware to serve
+    files from package data::
+
+        app = SharedDataMiddleware(app, {
+            '/static': ('myapplication', 'static')
+        })
+
+    This will then serve the ``static`` folder in the `myapplication`
+    Python package.
+
+    The optional `disallow` parameter is a :func:`~fnmatch.fnmatch` rule
+    for files that are not accessible from the web.  If `cache` is set to
+    `False` no caching headers are sent.
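+
+    For example, to hide dotfiles in the shared folders (a sketch; the
+    pattern is illustrative)::
+
+        app = SharedDataMiddleware(app, {'/shared': 'shared'},
+                                   disallow='.*')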
+
+    Currently the middleware does not support non-ASCII filenames. If the
+    encoding on the file system happens to match the encoding of the URI it may
+    work but this could also be by accident. We strongly suggest using ASCII
+    only file names for static files.
+
+    The middleware will guess the mimetype using the Python `mimetypes`
+    module.  If it's unable to figure out the mimetype it will fall back
+    to `fallback_mimetype`.
+
+    :param app: the application to wrap.  If you don't want to wrap an
+                application you can pass it :exc:`NotFound`.
+    :param exports: a list or dict of exported files and folders.
+    :param disallow: a :func:`~fnmatch.fnmatch` rule.
+    :param cache: enable or disable caching headers.
+    :param cache_timeout: the cache timeout in seconds for the headers.
+    :param fallback_mimetype: The fallback mimetype for unknown files.
+
+    .. versionchanged:: 1.0
+        The default ``fallback_mimetype`` is
+        ``application/octet-stream``. If a filename looks like a text
+        mimetype, the ``utf-8`` charset is added to it.
+
+    .. versionadded:: 0.6
+        Added ``fallback_mimetype``.
+
+    .. versionchanged:: 0.5
+        Added ``cache_timeout``.
+    """
+
+    def __init__(
+        self,
+        app: "WSGIApplication",
+        exports: t.Union[
+            t.Dict[str, t.Union[str, t.Tuple[str, str]]],
+            t.Iterable[t.Tuple[str, t.Union[str, t.Tuple[str, str]]]],
+        ],
+        disallow: t.Optional[str] = None,
+        cache: bool = True,
+        cache_timeout: int = 60 * 60 * 12,
+        fallback_mimetype: str = "application/octet-stream",
+    ) -> None:
+        self.app = app
+        self.exports: t.List[t.Tuple[str, _TLoader]] = []
+        self.cache = cache
+        self.cache_timeout = cache_timeout
+
+        if isinstance(exports, dict):
+            exports = exports.items()
+
+        for key, value in exports:
+            if isinstance(value, tuple):
+                loader = self.get_package_loader(*value)
+            elif isinstance(value, str):
+                if os.path.isfile(value):
+                    loader = self.get_file_loader(value)
+                else:
+                    loader = self.get_directory_loader(value)
+            else:
+                raise TypeError(f"unknown def {value!r}")
+
+            self.exports.append((key, loader))
+
+        if disallow is not None:
+            from fnmatch import fnmatch
+
+            self.is_allowed = lambda x: not fnmatch(x, disallow)
+
+        self.fallback_mimetype = fallback_mimetype
+
+    def is_allowed(self, filename: str) -> bool:
+        """Subclasses can override this method to disallow the access to
+        certain files.  However by providing `disallow` in the constructor
+        this method is overwritten.
+        """
+        return True
+
+    def _opener(self, filename: str) -> _TOpener:
+        return lambda: (
+            open(filename, "rb"),
+            datetime.fromtimestamp(os.path.getmtime(filename), tz=timezone.utc),
+            int(os.path.getsize(filename)),
+        )
+
+    def get_file_loader(self, filename: str) -> _TLoader:
+        return lambda x: (os.path.basename(filename), self._opener(filename))
+
+    def get_package_loader(self, package: str, package_path: str) -> _TLoader:
+        load_time = datetime.now(timezone.utc)
+        provider = pkgutil.get_loader(package)
+
+        if hasattr(provider, "get_resource_reader"):
+            # Python 3
+            reader = provider.get_resource_reader(package)  # type: ignore
+
+            def loader(
+                path: t.Optional[str],
+            ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]:
+                if path is None:
+                    return None, None
+
+                path = safe_join(package_path, path)
+
+                if path is None:
+                    return None, None
+
+                basename = posixpath.basename(path)
+
+                try:
+                    resource = reader.open_resource(path)
+                except OSError:
+                    return None, None
+
+                if isinstance(resource, BytesIO):
+                    return (
+                        basename,
+                        lambda: (resource, load_time, len(resource.getvalue())),
+                    )
+
+                return (
+                    basename,
+                    lambda: (
+                        resource,
+                        datetime.fromtimestamp(
+                            os.path.getmtime(resource.name), tz=timezone.utc
+                        ),
+                        os.path.getsize(resource.name),
+                    ),
+                )
+
+        else:
+            # Python 3.6
+            package_filename = provider.get_filename(package)  # type: ignore
+            is_filesystem = os.path.exists(package_filename)
+            root = os.path.join(os.path.dirname(package_filename), package_path)
+
+            def loader(
+                path: t.Optional[str],
+            ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]:
+                if path is None:
+                    return None, None
+
+                path = safe_join(root, path)
+
+                if path is None:
+                    return None, None
+
+                basename = posixpath.basename(path)
+
+                if is_filesystem:
+                    if not os.path.isfile(path):
+                        return None, None
+
+                    return basename, self._opener(path)
+
+                try:
+                    data = provider.get_data(path)  # type: ignore
+                except OSError:
+                    return None, None
+
+                return basename, lambda: (BytesIO(data), load_time, len(data))
+
+        return loader
+
+    def get_directory_loader(self, directory: str) -> _TLoader:
+        def loader(
+            path: t.Optional[str],
+        ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]:
+            if path is not None:
+                path = safe_join(directory, path)
+
+                if path is None:
+                    return None, None
+            else:
+                path = directory
+
+            if os.path.isfile(path):
+                return os.path.basename(path), self._opener(path)
+
+            return None, None
+
+        return loader
+
+    def generate_etag(self, mtime: datetime, file_size: int, real_filename: str) -> str:
+        if not isinstance(real_filename, bytes):
+            real_filename = real_filename.encode(  # type: ignore
+                get_filesystem_encoding()
+            )
+
+        timestamp = mtime.timestamp()
+        checksum = adler32(real_filename) & 0xFFFFFFFF  # type: ignore
+        return f"wzsdm-{timestamp}-{file_size}-{checksum}"
+
+    def __call__(
+        self, environ: "WSGIEnvironment", start_response: "StartResponse"
+    ) -> t.Iterable[bytes]:
+        path = get_path_info(environ)
+        file_loader = None
+
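+        # An exact match serves the mount itself (e.g. a single file);
+        # otherwise the mount is treated as a directory prefix.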
+        for search_path, loader in self.exports:
+            if search_path == path:
+                real_filename, file_loader = loader(None)
+
+                if file_loader is not None:
+                    break
+
+            if not search_path.endswith("/"):
+                search_path += "/"
+
+            if path.startswith(search_path):
+                real_filename, file_loader = loader(path[len(search_path) :])
+
+                if file_loader is not None:
+                    break
+
+        if file_loader is None or not self.is_allowed(real_filename):  # type: ignore
+            return self.app(environ, start_response)
+
+        guessed_type = mimetypes.guess_type(real_filename)  # type: ignore
+        mime_type = get_content_type(guessed_type[0] or self.fallback_mimetype, "utf-8")
+        f, mtime, file_size = file_loader()
+
+        headers = [("Date", http_date())]
+
+        if self.cache:
+            timeout = self.cache_timeout
+            etag = self.generate_etag(mtime, file_size, real_filename)  # type: ignore
+            headers += [
+                ("Etag", f'"{etag}"'),
+                ("Cache-Control", f"max-age={timeout}, public"),
+            ]
+
+            if not is_resource_modified(environ, etag, last_modified=mtime):
+                f.close()
+                start_response("304 Not Modified", headers)
+                return []
+
+            headers.append(("Expires", http_date(time() + timeout)))
+        else:
+            headers.append(("Cache-Control", "public"))
+
+        headers.extend(
+            (
+                ("Content-Type", mime_type),
+                ("Content-Length", str(file_size)),
+                ("Last-Modified", http_date(mtime)),
+            )
+        )
+        start_response("200 OK", headers)
+        return wrap_file(environ, f)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/py.typed b/venv/lib/python3.7/site-packages/werkzeug/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/werkzeug/routing.py b/venv/lib/python3.7/site-packages/werkzeug/routing.py
new file mode 100644
index 00000000..10438759
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/routing.py
@@ -0,0 +1,2332 @@
+"""When it comes to combining multiple controller or view functions
+(however you want to call them) you need a dispatcher. A simple way
+would be applying regular expression tests on the ``PATH_INFO`` and
+calling registered callback functions that return the value then.
+
+This module implements a much more powerful system than simple regular
+expression matching because it can also convert values in the URLs and
+build URLs.
+
+Here is a simple example that creates a URL map for an application with
+two subdomains (www and kb) and some URL rules:
+
+.. code-block:: python
+
+    m = Map([
+        # Static URLs
+        Rule('/', endpoint='static/index'),
+        Rule('/about', endpoint='static/about'),
+        Rule('/help', endpoint='static/help'),
+        # Knowledge Base
+        Subdomain('kb', [
+            Rule('/', endpoint='kb/index'),
+            Rule('/browse/', endpoint='kb/browse'),
+            Rule('/browse/<int:id>/', endpoint='kb/browse'),
+            Rule('/browse/<int:id>/<int:page>', endpoint='kb/browse')
+        ])
+    ], default_subdomain='www')
+
+If the application doesn't use subdomains it's perfectly fine to not set
+the default subdomain and not use the `Subdomain` rule factory. The
+endpoint in the rules can be anything, for example import paths or
+unique identifiers. The WSGI application can use those endpoints to get the
+handler for that URL. An endpoint doesn't have to be a string at all, but
+using strings is recommended.
+
+Now it's possible to create a URL adapter for one of the subdomains and
+build URLs:
+
+.. code-block:: python
+
+    c = m.bind('example.com')
+
+    c.build("kb/browse", dict(id=42))
+    'http://kb.example.com/browse/42/'
+
+    c.build("kb/browse", dict())
+    'http://kb.example.com/browse/'
+
+    c.build("kb/browse", dict(id=42, page=3))
+    'http://kb.example.com/browse/42/3'
+
+    c.build("static/about")
+    '/about'
+
+    c.build("static/index", force_external=True)
+    'http://www.example.com/'
+
+    c = m.bind('example.com', subdomain='kb')
+
+    c.build("static/about")
+    'http://www.example.com/about'
+
+The first argument to bind is the server name *without* the subdomain.
+By default it will assume that the script is mounted at the root, but
+often that's not the case, so you can provide the real mount point as
+the second argument:
+
+.. code-block:: python
+
+    c = m.bind('example.com', '/applications/example')
+
+The third argument can be the subdomain; if not given, the default
+subdomain is used.  For more details about binding have a look at the
+documentation of the `MapAdapter`.
+
+And here is how you can match URLs:
+
+.. code-block:: python
+
+    c = m.bind('example.com')
+
+    c.match("/")
+    ('static/index', {})
+
+    c.match("/about")
+    ('static/about', {})
+
+    c = m.bind('example.com', '/', 'kb')
+
+    c.match("/")
+    ('kb/index', {})
+
+    c.match("/browse/42/23")
+    ('kb/browse', {'id': 42, 'page': 23})
+
+If matching fails you get a ``NotFound`` exception. If the rule thinks
+it's a good idea to redirect (for example because the URL was defined
+to have a trailing slash but the request was missing that slash) it
+will raise a ``RequestRedirect`` exception. Both are subclasses of
+``HTTPException`` so you can use those errors as responses in the
+application.
+
+If matching succeeded but the URL rule was incompatible with the given
+method (for example there were only rules for ``GET`` and ``HEAD`` but
+routing tried to match a ``POST`` request) a ``MethodNotAllowed``
+exception is raised.
+"""
+import ast
+import difflib
+import posixpath
+import re
+import typing
+import typing as t
+import uuid
+import warnings
+from pprint import pformat
+from string import Template
+from threading import Lock
+from types import CodeType
+
+from ._internal import _encode_idna
+from ._internal import _get_environ
+from ._internal import _to_bytes
+from ._internal import _to_str
+from ._internal import _wsgi_decoding_dance
+from .datastructures import ImmutableDict
+from .datastructures import MultiDict
+from .exceptions import BadHost
+from .exceptions import BadRequest
+from .exceptions import HTTPException
+from .exceptions import MethodNotAllowed
+from .exceptions import NotFound
+from .urls import _fast_url_quote
+from .urls import url_encode
+from .urls import url_join
+from .urls import url_quote
+from .utils import cached_property
+from .utils import redirect
+from .wsgi import get_host
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+    from .wrappers.response import Response
+
+_rule_re = re.compile(
+    r"""
+    (?P<static>[^<]*)                           # static rule data
+    <
+    (?:
+        (?P<converter>[a-zA-Z_][a-zA-Z0-9_]*)   # converter name
+        (?:\((?P<args>.*?)\))?                  # converter arguments
+        \:                                      # variable delimiter
+    )?
+    (?P<variable>[a-zA-Z_][a-zA-Z0-9_]*)        # variable name
+    >
+    """,
+    re.VERBOSE,
+)
+_simple_rule_re = re.compile(r"<([^>]+)>")
+_converter_args_re = re.compile(
+    r"""
+    ((?P<name>\w+)\s*=\s*)?
+    (?P<value>
+        True|False|
+        \d+\.\d+|
+        \d+\.|
+        \d+|
+        [\w\d_.]+|
+        [urUR]?(?P<stringval>"[^"]*?"|'[^']*')
+    )\s*,
+    """,
+    re.VERBOSE,
+)
+
+
+_PYTHON_CONSTANTS = {"None": None, "True": True, "False": False}
+
+
+def _pythonize(value: str) -> t.Union[None, bool, int, float, str]:
+    if value in _PYTHON_CONSTANTS:
+        return _PYTHON_CONSTANTS[value]
+    for convert in int, float:
+        try:
+            return convert(value)  # type: ignore
+        except ValueError:
+            pass
+    if value[:1] == value[-1:] and value[0] in "\"'":
+        value = value[1:-1]
+    return str(value)
+
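+# Illustrative note (not part of Werkzeug): _pythonize maps literal tokens to
+# Python values, e.g. _pythonize("True") is True, _pythonize("3.5") is 3.5,
+# and _pythonize("'abc'") is the plain string 'abc'.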
+
+def parse_converter_args(argstr: str) -> t.Tuple[t.Tuple, t.Dict[str, t.Any]]:
+    argstr += ","
+    args = []
+    kwargs = {}
+
+    for item in _converter_args_re.finditer(argstr):
+        value = item.group("stringval")
+        if value is None:
+            value = item.group("value")
+        value = _pythonize(value)
+        if not item.group("name"):
+            args.append(value)
+        else:
+            name = item.group("name")
+            kwargs[name] = value
+
+    return tuple(args), kwargs
+
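+# Illustrative sketch (not part of Werkzeug): under the grammar above, the
+# argument string of a placeholder such as <int(2, signed=True):code> parses
+# as:
+#
+#     parse_converter_args("2, signed=True")  # -> ((2,), {'signed': True})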
+
+def parse_rule(rule: str) -> t.Iterator[t.Tuple[t.Optional[str], t.Optional[str], str]]:
+    """Parse a rule and return it as generator. Each iteration yields tuples
+    in the form ``(converter, arguments, variable)``. If the converter is
+    `None` it's a static url part, otherwise it's a dynamic one.
+
+    :internal:
+    """
+    pos = 0
+    end = len(rule)
+    do_match = _rule_re.match
+    used_names = set()
+    while pos < end:
+        m = do_match(rule, pos)
+        if m is None:
+            break
+        data = m.groupdict()
+        if data["static"]:
+            yield None, None, data["static"]
+        variable = data["variable"]
+        converter = data["converter"] or "default"
+        if variable in used_names:
+            raise ValueError(f"variable name {variable!r} used twice.")
+        used_names.add(variable)
+        yield converter, data["args"] or None, variable
+        pos = m.end()
+    if pos < end:
+        remaining = rule[pos:]
+        if ">" in remaining or "<" in remaining:
+            raise ValueError(f"malformed url rule: {rule!r}")
+        yield None, None, remaining
+
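+# Illustrative sketch (not part of Werkzeug): iterating parse_rule over a
+# rule string yields static and dynamic parts, e.g.:
+#
+#     list(parse_rule("/browse/<int:id>/"))
+#     # -> [(None, None, '/browse/'), ('int', None, 'id'), (None, None, '/')]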
+
+class RoutingException(Exception):
+    """Special exceptions that require the application to redirect, notifying
+    about missing urls, etc.
+
+    :internal:
+    """
+
+
+class RequestRedirect(HTTPException, RoutingException):
+    """Raise if the map requests a redirect. This is for example the case if
+    `strict_slashes` are activated and an url that requires a trailing slash.
+
+    The attribute `new_url` contains the absolute destination url.
+    """
+
+    code = 308
+
+    def __init__(self, new_url: str) -> None:
+        super().__init__(new_url)
+        self.new_url = new_url
+
+    def get_response(
+        self,
+        environ: t.Optional["WSGIEnvironment"] = None,
+        scope: t.Optional[dict] = None,
+    ) -> "Response":
+        return redirect(self.new_url, self.code)
+
+
+class RequestPath(RoutingException):
+    """Internal exception."""
+
+    __slots__ = ("path_info",)
+
+    def __init__(self, path_info: str) -> None:
+        super().__init__()
+        self.path_info = path_info
+
+
+class RequestAliasRedirect(RoutingException):  # noqa: B903
+    """This rule is an alias and wants to redirect to the canonical URL."""
+
+    def __init__(self, matched_values: t.Mapping[str, t.Any]) -> None:
+        super().__init__()
+        self.matched_values = matched_values
+
+
+class BuildError(RoutingException, LookupError):
+    """Raised if the build system cannot find a URL for an endpoint with the
+    values provided.
+    """
+
+    def __init__(
+        self,
+        endpoint: str,
+        values: t.Mapping[str, t.Any],
+        method: t.Optional[str],
+        adapter: t.Optional["MapAdapter"] = None,
+    ) -> None:
+        super().__init__(endpoint, values, method)
+        self.endpoint = endpoint
+        self.values = values
+        self.method = method
+        self.adapter = adapter
+
+    @cached_property
+    def suggested(self) -> t.Optional["Rule"]:
+        return self.closest_rule(self.adapter)
+
+    def closest_rule(self, adapter: t.Optional["MapAdapter"]) -> t.Optional["Rule"]:
+        def _score_rule(rule: "Rule") -> float:
+            return sum(
+                [
+                    0.98
+                    * difflib.SequenceMatcher(
+                        None, rule.endpoint, self.endpoint
+                    ).ratio(),
+                    0.01 * bool(set(self.values or ()).issubset(rule.arguments)),
+                    0.01 * bool(rule.methods and self.method in rule.methods),
+                ]
+            )
+
+        if adapter and adapter.map._rules:
+            return max(adapter.map._rules, key=_score_rule)
+
+        return None
+
+    def __str__(self) -> str:
+        message = [f"Could not build url for endpoint {self.endpoint!r}"]
+        if self.method:
+            message.append(f" ({self.method!r})")
+        if self.values:
+            message.append(f" with values {sorted(self.values)!r}")
+        message.append(".")
+        if self.suggested:
+            if self.endpoint == self.suggested.endpoint:
+                if (
+                    self.method
+                    and self.suggested.methods is not None
+                    and self.method not in self.suggested.methods
+                ):
+                    message.append(
+                        " Did you mean to use methods"
+                        f" {sorted(self.suggested.methods)!r}?"
+                    )
+                missing_values = self.suggested.arguments.union(
+                    set(self.suggested.defaults or ())
+                ) - set(self.values.keys())
+                if missing_values:
+                    message.append(
+                        f" Did you forget to specify values {sorted(missing_values)!r}?"
+                    )
+            else:
+                message.append(f" Did you mean {self.suggested.endpoint!r} instead?")
+        return "".join(message)
+
+
+class WebsocketMismatch(BadRequest):
+    """The only matched rule is either a WebSocket and the request is
+    HTTP, or the rule is HTTP and the request is a WebSocket.
+    """
+
+
+class ValidationError(ValueError):
+    """Validation error.  If a rule converter raises this exception the rule
+    does not match the current URL and the next rule is tried.
+    """
+
+
+class RuleFactory:
+    """As soon as you have more complex URL setups it's a good idea to use rule
+    factories to avoid repetitive tasks.  Some of them are builtin, others can
+    be added by subclassing `RuleFactory` and overriding `get_rules`.
+    """
+
+    def get_rules(self, map: "Map") -> t.Iterable["Rule"]:
+        """Subclasses of `RuleFactory` have to override this method and return
+        an iterable of rules."""
+        raise NotImplementedError()
+
+
+class Subdomain(RuleFactory):
+    """All URLs provided by this factory have the subdomain set to a
+    specific subdomain. For example if you want to use the subdomain for
+    the current language this can be a good setup::
+
+        url_map = Map([
+            Rule('/', endpoint='#select_language'),
+            Subdomain('<string(length=2):lang_code>', [
+                Rule('/', endpoint='index'),
+                Rule('/about', endpoint='about'),
+                Rule('/help', endpoint='help')
+            ])
+        ])
+
+    All the rules except for the ``'#select_language'`` endpoint will now
+    listen on a two letter long subdomain that holds the language code
+    for the current request.
+    """
+
+    def __init__(self, subdomain: str, rules: t.Iterable["Rule"]) -> None:
+        self.subdomain = subdomain
+        self.rules = rules
+
+    def get_rules(self, map: "Map") -> t.Iterator["Rule"]:
+        for rulefactory in self.rules:
+            for rule in rulefactory.get_rules(map):
+                rule = rule.empty()
+                rule.subdomain = self.subdomain
+                yield rule
+
+
+class Submount(RuleFactory):
+    """Like `Subdomain` but prefixes the URL rule with a given string::
+
+        url_map = Map([
+            Rule('/', endpoint='index'),
+            Submount('/blog', [
+                Rule('/', endpoint='blog/index'),
+                Rule('/entry/<entry_slug>', endpoint='blog/show')
+            ])
+        ])
+
+    Now the rule ``'blog/show'`` matches ``/blog/entry/<entry_slug>``.
+    """
+
+    def __init__(self, path: str, rules: t.Iterable["Rule"]) -> None:
+        self.path = path.rstrip("/")
+        self.rules = rules
+
+    def get_rules(self, map: "Map") -> t.Iterator["Rule"]:
+        for rulefactory in self.rules:
+            for rule in rulefactory.get_rules(map):
+                rule = rule.empty()
+                rule.rule = self.path + rule.rule
+                yield rule
+
+
+class EndpointPrefix(RuleFactory):
+    """Prefixes all endpoints (which must be strings for this factory) with
+    another string. This can be useful for sub applications::
+
+        url_map = Map([
+            Rule('/', endpoint='index'),
+            EndpointPrefix('blog/', [Submount('/blog', [
+                Rule('/', endpoint='index'),
+                Rule('/entry/<entry_slug>', endpoint='show')
+            ])])
+        ])
+    """
+
+    def __init__(self, prefix: str, rules: t.Iterable["Rule"]) -> None:
+        self.prefix = prefix
+        self.rules = rules
+
+    def get_rules(self, map: "Map") -> t.Iterator["Rule"]:
+        for rulefactory in self.rules:
+            for rule in rulefactory.get_rules(map):
+                rule = rule.empty()
+                rule.endpoint = self.prefix + rule.endpoint
+                yield rule
+
+
+class RuleTemplate:
+    """Returns copies of the rules wrapped and expands string templates in
+    the endpoint, rule, defaults or subdomain sections.
+
+    Here a small example for such a rule template::
+
+        from werkzeug.routing import Map, Rule, RuleTemplate
+
+        resource = RuleTemplate([
+            Rule('/$name/', endpoint='$name.list'),
+            Rule('/$name/<int:id>', endpoint='$name.show')
+        ])
+
+        url_map = Map([resource(name='user'), resource(name='page')])
+
+    When a rule template is called the keyword arguments are used to
+    replace the placeholders in all the string parameters.
+    """
+
+    def __init__(self, rules: t.Iterable["Rule"]) -> None:
+        self.rules = list(rules)
+
+    def __call__(self, *args: t.Any, **kwargs: t.Any) -> "RuleTemplateFactory":
+        return RuleTemplateFactory(self.rules, dict(*args, **kwargs))
+
+
+class RuleTemplateFactory(RuleFactory):
+    """A factory that fills in template variables into rules.  Used by
+    `RuleTemplate` internally.
+
+    :internal:
+    """
+
+    def __init__(self, rules: t.Iterable["Rule"], context: t.Dict[str, t.Any]) -> None:
+        self.rules = rules
+        self.context = context
+
+    def get_rules(self, map: "Map") -> t.Iterator["Rule"]:
+        for rulefactory in self.rules:
+            for rule in rulefactory.get_rules(map):
+                new_defaults = subdomain = None
+                if rule.defaults:
+                    new_defaults = {}
+                    for key, value in rule.defaults.items():
+                        if isinstance(value, str):
+                            value = Template(value).substitute(self.context)
+                        new_defaults[key] = value
+                if rule.subdomain is not None:
+                    subdomain = Template(rule.subdomain).substitute(self.context)
+                new_endpoint = rule.endpoint
+                if isinstance(new_endpoint, str):
+                    new_endpoint = Template(new_endpoint).substitute(self.context)
+                yield Rule(
+                    Template(rule.rule).substitute(self.context),
+                    new_defaults,
+                    subdomain,
+                    rule.methods,
+                    rule.build_only,
+                    new_endpoint,
+                    rule.strict_slashes,
+                )
+
+
+def _prefix_names(src: str) -> ast.stmt:
+    """ast parse and prefix names with `.` to avoid collision with user vars"""
+    tree = ast.parse(src).body[0]
+    if isinstance(tree, ast.Expr):
+        tree = tree.value  # type: ignore
+    for node in ast.walk(tree):
+        if isinstance(node, ast.Name):
+            node.id = f".{node.id}"
+    return tree
+
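+# Illustrative note (not part of Werkzeug): prefixing identifiers with "."
+# makes them invalid Python names, so generated builder locals such as
+# ".self" and ".kwargs" can never collide with user-supplied rule variables,
+# which become plain argument names in the compiled builder.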
+
+_CALL_CONVERTER_CODE_FMT = "self._converters[{elem!r}].to_url()"
+_IF_KWARGS_URL_ENCODE_CODE = """\
+if kwargs:
+    q = '?'
+    params = self._encode_query_vars(kwargs)
+else:
+    q = params = ''
+"""
+_IF_KWARGS_URL_ENCODE_AST = _prefix_names(_IF_KWARGS_URL_ENCODE_CODE)
+_URL_ENCODE_AST_NAMES = (_prefix_names("q"), _prefix_names("params"))
+
+
+class Rule(RuleFactory):
+    """A Rule represents one URL pattern.  There are some options for `Rule`
+    that change the way it behaves and are passed to the `Rule` constructor.
+    Note that besides the rule-string all arguments *must* be keyword arguments
+    in order to not break the application on Werkzeug upgrades.
+
+    `string`
+        Rule strings basically are just normal URL paths with placeholders in
+        the format ``<converter(arguments):name>`` where the converter and the
+        arguments are optional.  If no converter is defined the `default`
+        converter is used which means `string` in the normal configuration.
+
+        URL rules that end with a slash are branch URLs, others are leaves.
+        If you have `strict_slashes` enabled (which is the default), all
+        branch URLs that are matched without a trailing slash will trigger a
+        redirect to the same URL with the missing slash appended.
+
+        The converters are defined on the `Map`.
+
+    `endpoint`
+        The endpoint for this rule. This can be anything: a reference to a
+        function, a string, a number, etc.  The preferred way is using a string
+        because the endpoint is used for URL generation.
+
+    `defaults`
+        An optional dict with defaults for other rules with the same endpoint.
+        This is a bit tricky but useful if you want to have unique URLs::
+
+            url_map = Map([
+                Rule('/all/', defaults={'page': 1}, endpoint='all_entries'),
+                Rule('/all/page/<int:page>', endpoint='all_entries')
+            ])
+
+        If a user now visits ``http://example.com/all/page/1`` they will be
+        redirected to ``http://example.com/all/``.  If `redirect_defaults` is
+        disabled on the `Map` instance this will only affect the URL
+        generation.
+
+    `subdomain`
+        The subdomain rule string for this rule. If not specified the rule
+        only matches for the `default_subdomain` of the map.  If the map is
+        not bound to a subdomain this feature is disabled.
+
+        Can be useful if you want to have user profiles on different subdomains
+        and all subdomains are forwarded to your application::
+
+            url_map = Map([
+                Rule('/', subdomain='<username>', endpoint='user/homepage'),
+                Rule('/stats', subdomain='<username>', endpoint='user/stats')
+            ])
+
+    `methods`
+        A sequence of HTTP methods this rule applies to.  If not specified, all
+        methods are allowed. For example this can be useful if you want different
+        endpoints for `POST` and `GET`.  If methods are defined and the path
+        matches but the method matched against is not in this list or in the
+        list of another rule for that path the error raised is of the type
+        `MethodNotAllowed` rather than `NotFound`.  If `GET` is present in the
+        list of methods and `HEAD` is not, `HEAD` is added automatically.
+
+    `strict_slashes`
+        Override the `Map` setting for `strict_slashes` only for this rule. If
+        not specified the `Map` setting is used.
+
+    `merge_slashes`
+        Override :attr:`Map.merge_slashes` for this rule.
+
+    `build_only`
+        Set this to True and the rule will never match but will create a URL
+        that can be built. This is useful if you have resources on a subdomain
+        or folder that are not handled by the WSGI application (like static data).
+
+    `redirect_to`
+        If given this must be either a string or a callable.  If it is a
+        callable, it is called with the url adapter that triggered the match
+        and the values of the URL as keyword arguments, and has to return the
+        target for the redirect; otherwise it has to be a string with
+        placeholders in rule syntax::
+
+            def foo_with_slug(adapter, id):
+                # ask the database for the slug for the old id.  this of
+                # course has nothing to do with werkzeug.
+                return f'foo/{Foo.get_slug_for_id(id)}'
+
+            url_map = Map([
+                Rule('/foo/<slug>', endpoint='foo'),
+                Rule('/some/old/url/<slug>', redirect_to='foo/<slug>'),
+                Rule('/other/old/url/<int:id>', redirect_to=foo_with_slug)
+            ])
+
+        When the rule is matched the routing system will raise a
+        `RequestRedirect` exception with the target for the redirect.
+
+        Keep in mind that the URL will be joined against the URL root of the
+        script so don't use a leading slash on the target URL unless you
+        really mean root of that domain.
+
+    `alias`
+        If enabled this rule serves as an alias for another rule with the same
+        endpoint and arguments.
+
+    `host`
+        If provided and the URL map has host matching enabled this can be
+        used to provide a match rule for the whole host.  This also means
+        that the subdomain feature is disabled.
+
+    `websocket`
+        If ``True``, this rule only matches WebSocket (``ws://``,
+        ``wss://``) requests. By default, rules only match HTTP
+        requests.
+
+    .. versionadded:: 1.0
+        Added ``websocket``.
+
+    .. versionadded:: 1.0
+        Added ``merge_slashes``.
+
+    .. versionadded:: 0.7
+        Added ``alias`` and ``host``.
+
+    .. versionchanged:: 0.6.1
+       ``HEAD`` is added to ``methods`` if ``GET`` is present.
+    """
+
+    def __init__(
+        self,
+        string: str,
+        defaults: t.Optional[t.Mapping[str, t.Any]] = None,
+        subdomain: t.Optional[str] = None,
+        methods: t.Optional[t.Iterable[str]] = None,
+        build_only: bool = False,
+        endpoint: t.Optional[str] = None,
+        strict_slashes: t.Optional[bool] = None,
+        merge_slashes: t.Optional[bool] = None,
+        redirect_to: t.Optional[t.Union[str, t.Callable[..., str]]] = None,
+        alias: bool = False,
+        host: t.Optional[str] = None,
+        websocket: bool = False,
+    ) -> None:
+        if not string.startswith("/"):
+            raise ValueError("urls must start with a leading slash")
+        self.rule = string
+        self.is_leaf = not string.endswith("/")
+
+        self.map: "Map" = None  # type: ignore
+        self.strict_slashes = strict_slashes
+        self.merge_slashes = merge_slashes
+        self.subdomain = subdomain
+        self.host = host
+        self.defaults = defaults
+        self.build_only = build_only
+        self.alias = alias
+        self.websocket = websocket
+
+        if methods is not None:
+            if isinstance(methods, str):
+                raise TypeError("'methods' should be a list of strings.")
+
+            methods = {x.upper() for x in methods}
+
+            if "HEAD" not in methods and "GET" in methods:
+                methods.add("HEAD")
+
+            if websocket and methods - {"GET", "HEAD", "OPTIONS"}:
+                raise ValueError(
+                    "WebSocket rules can only use 'GET', 'HEAD', and 'OPTIONS' methods."
+                )
+
+        self.methods = methods
+        self.endpoint: str = endpoint  # type: ignore
+        self.redirect_to = redirect_to
+
+        if defaults:
+            self.arguments = set(map(str, defaults))
+        else:
+            self.arguments = set()
+
+        self._trace: t.List[t.Tuple[bool, str]] = []
+
+    def empty(self) -> "Rule":
+        """
+        Return an unbound copy of this rule.
+
+        This can be useful if you want to reuse an already bound URL for another
+        map.  See ``get_empty_kwargs`` to override what keyword arguments are
+        provided to the new copy.
+        """
+        return type(self)(self.rule, **self.get_empty_kwargs())
+
+    def get_empty_kwargs(self) -> t.Mapping[str, t.Any]:
+        """
+        Provides kwargs for instantiating an empty copy with ``empty()``.
+
+        Use this method to provide custom keyword arguments to the subclass of
+        ``Rule`` when calling ``some_rule.empty()``.  Helpful when the subclass
+        has custom keyword arguments that are needed at instantiation.
+
+        Must return a ``dict`` that will be provided as kwargs to the new
+        instance of ``Rule``, following the initial ``self.rule`` value which
+        is always provided as the first, required positional argument.
+        """
+        defaults = None
+        if self.defaults:
+            defaults = dict(self.defaults)
+        return dict(
+            defaults=defaults,
+            subdomain=self.subdomain,
+            methods=self.methods,
+            build_only=self.build_only,
+            endpoint=self.endpoint,
+            strict_slashes=self.strict_slashes,
+            redirect_to=self.redirect_to,
+            alias=self.alias,
+            host=self.host,
+        )
+
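+    # Illustrative sketch (not part of Werkzeug): a hypothetical Rule subclass
+    # with an extra constructor argument can preserve it across .empty() by
+    # extending get_empty_kwargs, e.g.:
+    #
+    #     class TaggedRule(Rule):
+    #         def __init__(self, string, tag=None, **kwargs):
+    #             super().__init__(string, **kwargs)
+    #             self.tag = tag
+    #
+    #         def get_empty_kwargs(self):
+    #             kwargs = dict(super().get_empty_kwargs())
+    #             kwargs["tag"] = self.tag
+    #             return kwargs
+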
+    def get_rules(self, map: "Map") -> t.Iterator["Rule"]:
+        yield self
+
+    def refresh(self) -> None:
+        """Rebinds and refreshes the URL.  Call this if you modified the
+        rule in place.
+
+        :internal:
+        """
+        self.bind(self.map, rebind=True)
+
+    def bind(self, map: "Map", rebind: bool = False) -> None:
+        """Bind the url to a map and create a regular expression based on
+        the information from the rule itself and the defaults from the map.
+
+        :internal:
+        """
+        if self.map is not None and not rebind:
+            raise RuntimeError(f"url rule {self!r} already bound to map {self.map!r}")
+        self.map = map
+        if self.strict_slashes is None:
+            self.strict_slashes = map.strict_slashes
+        if self.merge_slashes is None:
+            self.merge_slashes = map.merge_slashes
+        if self.subdomain is None:
+            self.subdomain = map.default_subdomain
+        self.compile()
+
+    def get_converter(
+        self,
+        variable_name: str,
+        converter_name: str,
+        args: t.Tuple,
+        kwargs: t.Mapping[str, t.Any],
+    ) -> "BaseConverter":
+        """Looks up the converter for the given parameter.
+
+        .. versionadded:: 0.9
+        """
+        if converter_name not in self.map.converters:
+            raise LookupError(f"the converter {converter_name!r} does not exist")
+        return self.map.converters[converter_name](self.map, *args, **kwargs)
+
+    def _encode_query_vars(self, query_vars: t.Mapping[str, t.Any]) -> str:
+        return url_encode(
+            query_vars,
+            charset=self.map.charset,
+            sort=self.map.sort_parameters,
+            key=self.map.sort_key,
+        )
+
+    def compile(self) -> None:
+        """Compiles the regular expression and stores it."""
+        assert self.map is not None, "rule not bound"
+
+        if self.map.host_matching:
+            domain_rule = self.host or ""
+        else:
+            domain_rule = self.subdomain or ""
+
+        self._trace = []
+        self._converters: t.Dict[str, "BaseConverter"] = {}
+        self._static_weights: t.List[t.Tuple[int, int]] = []
+        self._argument_weights: t.List[int] = []
+        regex_parts = []
+
+        def _build_regex(rule: str) -> None:
+            index = 0
+            for converter, arguments, variable in parse_rule(rule):
+                if converter is None:
+                    for match in re.finditer(r"/+|[^/]+", variable):
+                        part = match.group(0)
+                        if part.startswith("/"):
+                            if self.merge_slashes:
+                                regex_parts.append(r"/+?")
+                                self._trace.append((False, "/"))
+                            else:
+                                regex_parts.append(part)
+                                self._trace.append((False, part))
+                            continue
+                        self._trace.append((False, part))
+                        regex_parts.append(re.escape(part))
+                        if part:
+                            self._static_weights.append((index, -len(part)))
+                else:
+                    if arguments:
+                        c_args, c_kwargs = parse_converter_args(arguments)
+                    else:
+                        c_args = ()
+                        c_kwargs = {}
+                    convobj = self.get_converter(variable, converter, c_args, c_kwargs)
+                    regex_parts.append(f"(?P<{variable}>{convobj.regex})")
+                    self._converters[variable] = convobj
+                    self._trace.append((True, variable))
+                    self._argument_weights.append(convobj.weight)
+                    self.arguments.add(str(variable))
+                index = index + 1
+
+        _build_regex(domain_rule)
+        regex_parts.append("\\|")
+        self._trace.append((False, "|"))
+        _build_regex(self.rule if self.is_leaf else self.rule.rstrip("/"))
+        if not self.is_leaf:
+            self._trace.append((False, "/"))
+
+        self._build: t.Callable[..., t.Tuple[str, str]]
+        self._build = self._compile_builder(False).__get__(self, None)  # type: ignore
+        self._build_unknown: t.Callable[..., t.Tuple[str, str]]
+        self._build_unknown = self._compile_builder(True).__get__(  # type: ignore
+            self, None
+        )
+
+        if self.build_only:
+            return
+
+        if not (self.is_leaf and self.strict_slashes):
+            reps = "*" if self.merge_slashes else "?"
+            tail = f"(?<!/)(?P<__suffix__>/{reps})"
+        else:
+            tail = ""
+
+        regex = f"^{''.join(regex_parts)}{tail}$"
+        self._regex = re.compile(regex)
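+        # Illustrative note (not part of Werkzeug): on a default Map, a bound
+        # rule "/browse/<int:id>/" compiles to roughly
+        # ^\|/+?browse/+?(?P<id>\d+)(?<!/)(?P<__suffix__>/*)$
+        # where the part before "\|" matches the subdomain (or host).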
+
+    def match(
+        self, path: str, method: t.Optional[str] = None
+    ) -> t.Optional[t.MutableMapping[str, t.Any]]:
+        """Check if the rule matches a given path. Path is a string in the
+        form ``"subdomain|/path"`` and is assembled by the map.  If
+        the map is doing host matching the subdomain part will be the host
+        instead.
+
+        If the rule matches a dict with the converted values is returned,
+        otherwise the return value is `None`.
+
+        :internal:
+        """
+        if not self.build_only:
+            require_redirect = False
+
+            m = self._regex.search(path)
+            if m is not None:
+                groups = m.groupdict()
+                # we have a folder like part of the url without a trailing
+                # slash and strict slashes enabled. raise an exception that
+                # tells the map to redirect to the same url but with a
+                # trailing slash
+                if (
+                    self.strict_slashes
+                    and not self.is_leaf
+                    and not groups.pop("__suffix__")
+                    and (
+                        method is None or self.methods is None or method in self.methods
+                    )
+                ):
+                    path += "/"
+                    require_redirect = True
+                # if we are not in strict slashes mode we have to remove
+                # a __suffix__
+                elif not self.strict_slashes:
+                    del groups["__suffix__"]
+
+                result = {}
+                for name, value in groups.items():
+                    try:
+                        value = self._converters[name].to_python(value)
+                    except ValidationError:
+                        return None
+                    result[str(name)] = value
+                if self.defaults:
+                    result.update(self.defaults)
+
+                if self.merge_slashes:
+                    new_path = "|".join(self.build(result, False))  # type: ignore
+                    if path.endswith("/") and not new_path.endswith("/"):
+                        new_path += "/"
+                    if new_path.count("/") < path.count("/"):
+                        path = new_path
+                        require_redirect = True
+
+                if require_redirect:
+                    path = path.split("|", 1)[1]
+                    raise RequestPath(path)
+
+                if self.alias and self.map.redirect_defaults:
+                    raise RequestAliasRedirect(result)
+
+                return result
+
+        return None
+
+    @staticmethod
+    def _get_func_code(code: CodeType, name: str) -> t.Callable[..., t.Tuple[str, str]]:
+        globs: t.Dict[str, t.Any] = {}
+        locs: t.Dict[str, t.Any] = {}
+        exec(code, globs, locs)
+        return locs[name]  # type: ignore
+
+    def _compile_builder(
+        self, append_unknown: bool = True
+    ) -> t.Callable[..., t.Tuple[str, str]]:
+        defaults = self.defaults or {}
+        dom_ops: t.List[t.Tuple[bool, str]] = []
+        url_ops: t.List[t.Tuple[bool, str]] = []
+
+        opl = dom_ops
+        for is_dynamic, data in self._trace:
+            if data == "|" and opl is dom_ops:
+                opl = url_ops
+                continue
+            # this seems like a silly case to ever come up but:
+            # if a default is given for a value that appears in the rule,
+            # resolve it to a constant ahead of time
+            if is_dynamic and data in defaults:
+                data = self._converters[data].to_url(defaults[data])
+                opl.append((False, data))
+            elif not is_dynamic:
+                opl.append(
+                    (False, url_quote(_to_bytes(data, self.map.charset), safe="/:|+"))
+                )
+            else:
+                opl.append((True, data))
+
+        def _convert(elem: str) -> ast.stmt:
+            ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem))
+            ret.args = [ast.Name(str(elem), ast.Load())]  # type: ignore  # str for py2
+            return ret
+
+        def _parts(ops: t.List[t.Tuple[bool, str]]) -> t.List[ast.AST]:
+            parts = [
+                _convert(elem) if is_dynamic else ast.Str(s=elem)
+                for is_dynamic, elem in ops
+            ]
+            parts = parts or [ast.Str("")]
+            # constant fold
+            ret = [parts[0]]
+            for p in parts[1:]:
+                if isinstance(p, ast.Str) and isinstance(ret[-1], ast.Str):
+                    ret[-1] = ast.Str(ret[-1].s + p.s)
+                else:
+                    ret.append(p)
+            return ret
+
+        dom_parts = _parts(dom_ops)
+        url_parts = _parts(url_ops)
+        if not append_unknown:
+            body = []
+        else:
+            body = [_IF_KWARGS_URL_ENCODE_AST]
+            url_parts.extend(_URL_ENCODE_AST_NAMES)
+
+        def _join(parts: t.List[ast.AST]) -> ast.AST:
+            if len(parts) == 1:  # shortcut
+                return parts[0]
+            return ast.JoinedStr(parts)
+
+        body.append(
+            ast.Return(ast.Tuple([_join(dom_parts), _join(url_parts)], ast.Load()))
+        )
+
+        pargs = [
+            elem
+            for is_dynamic, elem in dom_ops + url_ops
+            if is_dynamic and elem not in defaults
+        ]
+        kargs = [str(k) for k in defaults]
+
+        func_ast: ast.FunctionDef = _prefix_names("def _(): pass")  # type: ignore
+        func_ast.name = f"<builder:{self.rule!r}>"
+        func_ast.args.args.append(ast.arg(".self", None))
+        for arg in pargs + kargs:
+            func_ast.args.args.append(ast.arg(arg, None))
+        func_ast.args.kwarg = ast.arg(".kwargs", None)
+        for _ in kargs:
+            func_ast.args.defaults.append(ast.Str(""))
+        func_ast.body = body
+
+        # use `ast.parse` instead of `ast.Module` for better portability
+        # Python 3.8 changes the signature of `ast.Module`
+        module = ast.parse("")
+        module.body = [func_ast]
+
+        # mark everything as on line 1, offset 0
+        # less error-prone than `ast.fix_missing_locations`
+        # bad line numbers cause an assert to fail in debug builds
+        for node in ast.walk(module):
+            if "lineno" in node._attributes:
+                node.lineno = 1
+            if "col_offset" in node._attributes:
+                node.col_offset = 0
+
+        code = compile(module, "<werkzeug routing>", "exec")
+        return self._get_func_code(code, func_ast.name)
+
+    def build(
+        self, values: t.Mapping[str, t.Any], append_unknown: bool = True
+    ) -> t.Optional[t.Tuple[str, str]]:
+        """Assembles the relative url for that rule and the subdomain.
+        If building doesn't work for some reason `None` is returned.
+
+        :internal:
+        """
+        try:
+            if append_unknown:
+                return self._build_unknown(**values)
+            else:
+                return self._build(**values)
+        except ValidationError:
+            return None
+
+    def provides_defaults_for(self, rule: "Rule") -> bool:
+        """Check if this rule has defaults for a given rule.
+
+        :internal:
+        """
+        return bool(
+            not self.build_only
+            and self.defaults
+            and self.endpoint == rule.endpoint
+            and self != rule
+            and self.arguments == rule.arguments
+        )
+
+    def suitable_for(
+        self, values: t.Mapping[str, t.Any], method: t.Optional[str] = None
+    ) -> bool:
+        """Check if the dict of values has enough data for url generation.
+
+        :internal:
+        """
+        # if a method was given explicitly and that method is not supported
+        # by this rule, this rule is not suitable.
+        if (
+            method is not None
+            and self.methods is not None
+            and method not in self.methods
+        ):
+            return False
+
+        defaults = self.defaults or ()
+
+        # all arguments required must be either in the defaults dict or
+        # the value dictionary otherwise it's not suitable
+        for key in self.arguments:
+            if key not in defaults and key not in values:
+                return False
+
+        # in case defaults are given we ensure that either the value was
+        # skipped or the value is the same as the default value.
+        if defaults:
+            for key, value in defaults.items():
+                if key in values and value != values[key]:
+                    return False
+
+        return True
+
+    def match_compare_key(
+        self,
+    ) -> t.Tuple[bool, int, t.Iterable[t.Tuple[int, int]], int, t.Iterable[int]]:
+        """The match compare key for sorting.
+
+        Current implementation:
+
+        1.  rules without any arguments come first for performance
+            reasons only as we expect them to match faster and some
+            common ones usually don't have any arguments (index pages etc.)
+        2.  rules with more static parts come first, so the second argument
+            is the negative count of static weights.
+        3.  we order by static weights, which is a combination of index
+            and length
+        4.  more complex rules come first, so the next argument is the
+            negative count of argument weights.
+        5.  lastly we order by the actual argument weights.
+
+        :internal:
+        """
+        return (
+            bool(self.arguments),
+            -len(self._static_weights),
+            self._static_weights,
+            -len(self._argument_weights),
+            self._argument_weights,
+        )
+
+    def build_compare_key(self) -> t.Tuple[int, int, int]:
+        """The build compare key for sorting.
+
+        :internal:
+        """
+        return (1 if self.alias else 0, -len(self.arguments), -len(self.defaults or ()))
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, type(self)) and self._trace == other._trace
+
+    __hash__ = None  # type: ignore
+
+    def __str__(self) -> str:
+        return self.rule
+
+    def __repr__(self) -> str:
+        if self.map is None:
+            return f"<{type(self).__name__} (unbound)>"
+        parts = []
+        for is_dynamic, data in self._trace:
+            if is_dynamic:
+                parts.append(f"<{data}>")
+            else:
+                parts.append(data)
+        parts = "".join(parts).lstrip("|")
+        methods = f" ({', '.join(self.methods)})" if self.methods is not None else ""
+        return f"<{type(self).__name__} {parts!r}{methods} -> {self.endpoint}>"
+
+
+class BaseConverter:
+    """Base class for all converters."""
+
+    regex = "[^/]+"
+    weight = 100
+
+    def __init__(self, map: "Map", *args: t.Any, **kwargs: t.Any) -> None:
+        self.map = map
+
+    def to_python(self, value: str) -> t.Any:
+        return value
+
+    def to_url(self, value: t.Any) -> str:
+        if isinstance(value, (bytes, bytearray)):
+            return _fast_url_quote(value)
+        return _fast_url_quote(str(value).encode(self.map.charset))
+
+
+class UnicodeConverter(BaseConverter):
+    """This converter is the default converter and accepts any string but
+    only one path segment.  Thus the string cannot include a slash.
+
+    This is the default validator.
+
+    Example::
+
+        Rule('/pages/<page>'),
+        Rule('/<string(length=2):lang_code>')
+
+    :param map: the :class:`Map`.
+    :param minlength: the minimum length of the string.  Must be greater
+                      than or equal to 1.
+    :param maxlength: the maximum length of the string.
+    :param length: the exact length of the string.
+    """
+
+    def __init__(
+        self,
+        map: "Map",
+        minlength: int = 1,
+        maxlength: t.Optional[int] = None,
+        length: t.Optional[int] = None,
+    ) -> None:
+        super().__init__(map)
+        if length is not None:
+            length_regex = f"{{{int(length)}}}"
+        else:
+            if maxlength is None:
+                maxlength_value = ""
+            else:
+                maxlength_value = str(int(maxlength))
+            length_regex = f"{{{int(minlength)},{maxlength_value}}}"
+        self.regex = f"[^/]{length_regex}"
+
+
+class AnyConverter(BaseConverter):
+    """Matches one of the items provided.  Items can either be Python
+    identifiers or strings::
+
+        Rule('/<any(about, help, imprint, class, "foo,bar"):page_name>')
+
+    :param map: the :class:`Map`.
+    :param items: this function accepts the possible items as positional
+                  arguments.
+    """
+
+    def __init__(self, map: "Map", *items: str) -> None:
+        super().__init__(map)
+        self.regex = f"(?:{'|'.join([re.escape(x) for x in items])})"
+
+
+class PathConverter(BaseConverter):
+    """Like the default :class:`UnicodeConverter`, but it also matches
+    slashes.  This is useful for wikis and similar applications::
+
+        Rule('/<path:wikipage>')
+        Rule('/<path:wikipage>/edit')
+
+    :param map: the :class:`Map`.
+    """
+
+    regex = "[^/].*?"
+    weight = 200
+
+
+class NumberConverter(BaseConverter):
+    """Baseclass for `IntegerConverter` and `FloatConverter`.
+
+    :internal:
+    """
+
+    weight = 50
+    num_convert: t.Callable = int
+
+    def __init__(
+        self,
+        map: "Map",
+        fixed_digits: int = 0,
+        min: t.Optional[int] = None,
+        max: t.Optional[int] = None,
+        signed: bool = False,
+    ) -> None:
+        if signed:
+            self.regex = self.signed_regex
+        super().__init__(map)
+        self.fixed_digits = fixed_digits
+        self.min = min
+        self.max = max
+        self.signed = signed
+
+    def to_python(self, value: str) -> t.Any:
+        if self.fixed_digits and len(value) != self.fixed_digits:
+            raise ValidationError()
+        value = self.num_convert(value)
+        if (self.min is not None and value < self.min) or (
+            self.max is not None and value > self.max
+        ):
+            raise ValidationError()
+        return value
+
+    def to_url(self, value: t.Any) -> str:
+        value = str(self.num_convert(value))
+        if self.fixed_digits:
+            value = value.zfill(self.fixed_digits)
+        return value
+
+    @property
+    def signed_regex(self) -> str:
+        return f"-?{self.regex}"
+
+
+class IntegerConverter(NumberConverter):
+    """This converter only accepts integer values::
+
+        Rule("/page/<int:page>")
+
+    By default it only accepts unsigned, positive values. The ``signed``
+    parameter will enable signed, negative values. ::
+
+        Rule("/page/<int(signed=True):page>")
+
+    :param map: The :class:`Map`.
+    :param fixed_digits: The number of fixed digits in the URL. If you
+        set this to ``4`` for example, the rule will only match if the
+        URL looks like ``/0001/``. The default is variable length.
+    :param min: The minimal value.
+    :param max: The maximal value.
+    :param signed: Allow signed (negative) values.
+
+    .. versionadded:: 0.15
+        The ``signed`` parameter.
+    """
+
+    regex = r"\d+"
+
+
+class FloatConverter(NumberConverter):
+    """This converter only accepts floating point values::
+
+        Rule("/probability/<float:probability>")
+
+    By default it only accepts unsigned, positive values. The ``signed``
+    parameter will enable signed, negative values. ::
+
+        Rule("/offset/<float(signed=True):offset>")
+
+    :param map: The :class:`Map`.
+    :param min: The minimal value.
+    :param max: The maximal value.
+    :param signed: Allow signed (negative) values.
+
+    .. versionadded:: 0.15
+        The ``signed`` parameter.
+    """
+
+    regex = r"\d+\.\d+"
+    num_convert = float
+
+    def __init__(
+        self,
+        map: "Map",
+        min: t.Optional[float] = None,
+        max: t.Optional[float] = None,
+        signed: bool = False,
+    ) -> None:
+        super().__init__(map, min=min, max=max, signed=signed)  # type: ignore
+
+
+class UUIDConverter(BaseConverter):
+    """This converter only accepts UUID strings::
+
+        Rule('/object/<uuid:identifier>')
+
+    .. versionadded:: 0.10
+
+    :param map: the :class:`Map`.
+    """
+
+    regex = (
+        r"[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-"
+        r"[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}"
+    )
+
+    def to_python(self, value: str) -> uuid.UUID:
+        return uuid.UUID(value)
+
+    def to_url(self, value: uuid.UUID) -> str:
+        return str(value)
+
+
+#: the default converter mapping for the map.
+DEFAULT_CONVERTERS: t.Mapping[str, t.Type[BaseConverter]] = {
+    "default": UnicodeConverter,
+    "string": UnicodeConverter,
+    "any": AnyConverter,
+    "path": PathConverter,
+    "int": IntegerConverter,
+    "float": FloatConverter,
+    "uuid": UUIDConverter,
+}
+
+
+class Map:
+    """The map class stores all the URL rules and some configuration
+    parameters.  Some of the configuration values are only stored on the
+    `Map` instance since those affect all rules, others are just defaults
+    and can be overridden for each rule.  Note that you have to specify all
+    arguments besides the `rules` as keyword arguments!
+
+    :param rules: sequence of url rules for this map.
+    :param default_subdomain: The default subdomain for rules without a
+                              subdomain defined.
+    :param charset: charset of the URL. Defaults to ``"utf-8"``.
+    :param strict_slashes: If a rule ends with a slash but the matched
+        URL does not, redirect to the URL with a trailing slash.
+    :param merge_slashes: Merge consecutive slashes when matching or
+        building URLs. Matches will redirect to the normalized URL.
+        Slashes in variable parts are not merged.
+    :param redirect_defaults: This will redirect to the default rule if it
+                              wasn't visited that way. This helps create
+                              unique URLs.
+    :param converters: A dict of converters that adds additional converters
+                       to the list of converters. If you redefine one
+                       converter this will override the original one.
+    :param sort_parameters: If set to `True` the url parameters are sorted.
+                            See `url_encode` for more details.
+    :param sort_key: The sort key function for `url_encode`.
+    :param encoding_errors: the error method to use for decoding
+    :param host_matching: if set to `True` it enables the host matching
+                          feature and disables the subdomain one.  If
+                          enabled the `host` parameter to rules is used
+                          instead of the `subdomain` one.
+
+    .. versionchanged:: 1.0
+        If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules
+        will match.
+
+    .. versionchanged:: 1.0
+        Added ``merge_slashes``.
+
+    .. versionchanged:: 0.7
+        Added ``encoding_errors`` and ``host_matching``.
+
+    .. versionchanged:: 0.5
+        Added ``sort_parameters`` and ``sort_key``.
+    """
+
+    #: A dict of default converters to be used.
+    default_converters = ImmutableDict(DEFAULT_CONVERTERS)
+
+    #: The type of lock to use when updating.
+    #:
+    #: .. versionadded:: 1.0
+    lock_class = Lock
+
+    def __init__(
+        self,
+        rules: t.Optional[t.Iterable[RuleFactory]] = None,
+        default_subdomain: str = "",
+        charset: str = "utf-8",
+        strict_slashes: bool = True,
+        merge_slashes: bool = True,
+        redirect_defaults: bool = True,
+        converters: t.Optional[t.Mapping[str, t.Type[BaseConverter]]] = None,
+        sort_parameters: bool = False,
+        sort_key: t.Optional[t.Callable[[t.Any], t.Any]] = None,
+        encoding_errors: str = "replace",
+        host_matching: bool = False,
+    ) -> None:
+        self._rules: t.List[Rule] = []
+        self._rules_by_endpoint: t.Dict[str, t.List[Rule]] = {}
+        self._remap = True
+        self._remap_lock = self.lock_class()
+
+        self.default_subdomain = default_subdomain
+        self.charset = charset
+        self.encoding_errors = encoding_errors
+        self.strict_slashes = strict_slashes
+        self.merge_slashes = merge_slashes
+        self.redirect_defaults = redirect_defaults
+        self.host_matching = host_matching
+
+        self.converters = self.default_converters.copy()
+        if converters:
+            self.converters.update(converters)
+
+        self.sort_parameters = sort_parameters
+        self.sort_key = sort_key
+
+        for rulefactory in rules or ():
+            self.add(rulefactory)
+
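+    # Illustrative sketch (not part of Werkzeug): the ``converters`` argument
+    # registers additional converter classes under new names, e.g. a
+    # hypothetical converter matching comma-separated integers:
+    #
+    #     class ListConverter(BaseConverter):
+    #         regex = r"\d+(?:,\d+)*"
+    #
+    #         def to_python(self, value):
+    #             return [int(x) for x in value.split(",")]
+    #
+    #         def to_url(self, value):
+    #             return ",".join(str(x) for x in value)
+    #
+    #     url_map = Map([Rule("/items/<list:ids>", endpoint="items")],
+    #                   converters={"list": ListConverter})
+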
+    def is_endpoint_expecting(self, endpoint: str, *arguments: str) -> bool:
+        """Iterate over all rules and check if the endpoint expects
+        the arguments provided.  This is for example useful if you have
+        some URLs that expect a language code and others that do not, and
+        you want to wrap the builder a bit so that the current language
+        code is added automatically when an endpoint expects it but it was
+        not provided.
+
+        :param endpoint: the endpoint to check.
+        :param arguments: this function accepts one or more arguments
+                          as positional arguments.  Each one of them is
+                          checked.
+        """
+        self.update()
+        arguments = set(arguments)
+        for rule in self._rules_by_endpoint[endpoint]:
+            if arguments.issubset(rule.arguments):
+                return True
+        return False
+
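+    # Illustrative sketch (not part of Werkzeug): with
+    # Rule('/all/', defaults={'page': 1}, endpoint='all_entries') and
+    # Rule('/all/page/<int:page>', endpoint='all_entries') in the map,
+    # map.is_endpoint_expecting('all_entries', 'page') returns True.
+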
+    def iter_rules(self, endpoint: t.Optional[str] = None) -> t.Iterator[Rule]:
+        """Iterate over all rules or the rules of an endpoint.
+
+        :param endpoint: if provided only the rules for that endpoint
+                         are returned.
+        :return: an iterator
+        """
+        self.update()
+        if endpoint is not None:
+            return iter(self._rules_by_endpoint[endpoint])
+        return iter(self._rules)
+
+    def add(self, rulefactory: RuleFactory) -> None:
+        """Add a new rule or factory to the map and bind it.  Requires that the
+        rule is not bound to another map.
+
+        :param rulefactory: a :class:`Rule` or :class:`RuleFactory`
+        """
+        for rule in rulefactory.get_rules(self):
+            rule.bind(self)
+            self._rules.append(rule)
+            self._rules_by_endpoint.setdefault(rule.endpoint, []).append(rule)
+        self._remap = True
+
+    def bind(
+        self,
+        server_name: str,
+        script_name: t.Optional[str] = None,
+        subdomain: t.Optional[str] = None,
+        url_scheme: str = "http",
+        default_method: str = "GET",
+        path_info: t.Optional[str] = None,
+        query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None,
+    ) -> "MapAdapter":
+        """Return a new :class:`MapAdapter` with the details specified to the
+        call.  Note that `script_name` will default to ``'/'`` if not further
+        specified or `None`.  The `server_name` at least is a requirement
+        because the HTTP RFC requires absolute URLs for redirects and so all
+        redirect exceptions raised by Werkzeug will contain the full canonical
+        URL.
+
+        If no path_info is passed to :meth:`match` it will use the default path
+        info passed to bind.  While this doesn't really make sense for
+        manual bind calls, it's useful if you bind a map to a WSGI
+        environment which already contains the path info.
+
+        `subdomain` will default to the `default_subdomain` for this
+        map if not defined.  If there is no `default_subdomain` you
+        cannot use the subdomain feature.
+
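+        A minimal sketch of manual binding (the server name is
+        illustrative):
+
+        >>> m = Map([Rule('/', endpoint='index')])
+        >>> urls = m.bind('example.com')
+        >>> urls.match('/')
+        ('index', {})
+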
+        .. versionchanged:: 1.0
+            If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules
+            will match.
+
+        .. versionchanged:: 0.15
+            ``path_info`` defaults to ``'/'`` if ``None``.
+
+        .. versionchanged:: 0.8
+            ``query_args`` can be a string.
+
+        .. versionchanged:: 0.7
+            Added ``query_args``.
+        """
+        server_name = server_name.lower()
+        if self.host_matching:
+            if subdomain is not None:
+                raise RuntimeError("host matching enabled and a subdomain was provided")
+        elif subdomain is None:
+            subdomain = self.default_subdomain
+        if script_name is None:
+            script_name = "/"
+        if path_info is None:
+            path_info = "/"
+        try:
+            server_name = _encode_idna(server_name)  # type: ignore
+        except UnicodeError:
+            raise BadHost()
+        return MapAdapter(
+            self,
+            server_name,
+            script_name,
+            subdomain,
+            url_scheme,
+            path_info,
+            default_method,
+            query_args,
+        )
+
+    def bind_to_environ(
+        self,
+        environ: "WSGIEnvironment",
+        server_name: t.Optional[str] = None,
+        subdomain: t.Optional[str] = None,
+    ) -> "MapAdapter":
+        """Like :meth:`bind` but you can pass it an WSGI environment and it
+        will fetch the information from that dictionary.  Note that because of
+        limitations in the protocol there is no way to get the current
+        subdomain and real `server_name` from the environment.  If you don't
+        provide it, Werkzeug will use `SERVER_NAME` and `SERVER_PORT` (or
+        `HTTP_HOST` if provided) as used `server_name` with disabled subdomain
+        feature.
+
+        If `subdomain` is `None` but an environment and a server name are
+        provided it will calculate the current subdomain automatically.
+        For example, if `server_name` is ``'example.com'`` and the
+        `SERVER_NAME` in the WSGI `environ` is ``'staging.dev.example.com'``,
+        the calculated subdomain will be ``'staging.dev'``.
+
+        If the object passed as environ has an environ attribute, the value
+        of this attribute is used instead.  This allows you to pass request
+        objects.  Additionally, `PATH_INFO` is added as a default of the
+        :class:`MapAdapter` so that you don't have to pass the path info to
+        the match method.
+
+        .. versionchanged:: 1.0.0
+            If the passed server name specifies port 443, it will match
+            if the incoming scheme is ``https`` without a port.
+
+        .. versionchanged:: 1.0.0
+            A warning is shown when the passed server name does not
+            match the incoming WSGI server name.
+
+        .. versionchanged:: 0.8
+           This will no longer raise a ValueError when an unexpected server
+           name was passed.
+
+        .. versionchanged:: 0.5
+            Previously this method accepted a bogus `calculate_subdomain`
+            parameter that did not have any effect.  It was removed because
+            of that.
+
+        :param environ: a WSGI environment.
+        :param server_name: an optional server name hint (see above).
+        :param subdomain: optionally the current subdomain (see above).
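+
+        A small sketch using a test environment (the URL is
+        illustrative):
+
+        >>> from werkzeug.test import create_environ
+        >>> m = Map([Rule('/', endpoint='index')])
+        >>> urls = m.bind_to_environ(create_environ('/', 'http://example.com/'))
+        >>> urls.match()
+        ('index', {})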
+        """
+        environ = _get_environ(environ)
+        wsgi_server_name = get_host(environ).lower()
+        scheme = environ["wsgi.url_scheme"]
+
+        if (
+            environ.get("HTTP_CONNECTION", "").lower() == "upgrade"
+            and environ.get("HTTP_UPGRADE", "").lower() == "websocket"
+        ):
+            scheme = "wss" if scheme == "https" else "ws"
+
+        if server_name is None:
+            server_name = wsgi_server_name
+        else:
+            server_name = server_name.lower()
+
+            # strip standard port to match get_host()
+            if scheme in {"http", "ws"} and server_name.endswith(":80"):
+                server_name = server_name[:-3]
+            elif scheme in {"https", "wss"} and server_name.endswith(":443"):
+                server_name = server_name[:-4]
+
+        if subdomain is None and not self.host_matching:
+            cur_server_name = wsgi_server_name.split(".")
+            real_server_name = server_name.split(".")
+            offset = -len(real_server_name)
+
+            if cur_server_name[offset:] != real_server_name:
+                # This can happen even with valid configs if the server was
+                # accessed directly by IP address under some situations.
+                # Instead of raising an exception like in Werkzeug 0.7 or
+                # earlier we go by an invalid subdomain which will result
+                # in a 404 error on matching.
+                warnings.warn(
+                    f"Current server name {wsgi_server_name!r} doesn't match configured"
+                    f" server name {server_name!r}",
+                    stacklevel=2,
+                )
+                subdomain = "<invalid>"
+            else:
+                subdomain = ".".join(filter(None, cur_server_name[:offset]))
+
+        def _get_wsgi_string(name: str) -> t.Optional[str]:
+            val = environ.get(name)
+            if val is not None:
+                return _wsgi_decoding_dance(val, self.charset)
+            return None
+
+        script_name = _get_wsgi_string("SCRIPT_NAME")
+        path_info = _get_wsgi_string("PATH_INFO")
+        query_args = _get_wsgi_string("QUERY_STRING")
+        return Map.bind(
+            self,
+            server_name,
+            script_name,
+            subdomain,
+            scheme,
+            environ["REQUEST_METHOD"],
+            path_info,
+            query_args=query_args,
+        )
+
+    def update(self) -> None:
+        """Called before matching and building to keep the compiled rules
+        in the correct order after things changed.
+        """
+        if not self._remap:
+            return
+
+        with self._remap_lock:
+            if not self._remap:
+                return
+
+            self._rules.sort(key=lambda x: x.match_compare_key())
+            for rules in self._rules_by_endpoint.values():
+                rules.sort(key=lambda x: x.build_compare_key())
+            self._remap = False
+
+    def __repr__(self) -> str:
+        rules = self.iter_rules()
+        return f"{type(self).__name__}({pformat(list(rules))})"
+
+
+class MapAdapter:
+
+    """Returned by :meth:`Map.bind` or :meth:`Map.bind_to_environ` and does
+    the URL matching and building based on runtime information.
+    """
+
+    def __init__(
+        self,
+        map: Map,
+        server_name: str,
+        script_name: str,
+        subdomain: t.Optional[str],
+        url_scheme: str,
+        path_info: str,
+        default_method: str,
+        query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None,
+    ):
+        self.map = map
+        self.server_name = _to_str(server_name)
+        script_name = _to_str(script_name)
+        if not script_name.endswith("/"):
+            script_name += "/"
+        self.script_name = script_name
+        self.subdomain = _to_str(subdomain)
+        self.url_scheme = _to_str(url_scheme)
+        self.path_info = _to_str(path_info)
+        self.default_method = _to_str(default_method)
+        self.query_args = query_args
+        self.websocket = self.url_scheme in {"ws", "wss"}
+
+    def dispatch(
+        self,
+        view_func: t.Callable[[str, t.Mapping[str, t.Any]], "WSGIApplication"],
+        path_info: t.Optional[str] = None,
+        method: t.Optional[str] = None,
+        catch_http_exceptions: bool = False,
+    ) -> "WSGIApplication":
+        """Does the complete dispatching process.  `view_func` is called with
+        the endpoint and a dict with the values for the view.  It should
+        look up the view function, call it, and return a response object
+        or WSGI application.  http exceptions are not caught by default
+        so that applications can display nicer error messages by just
+        catching them by hand.  If you want to stick with the default
+        error messages you can pass it ``catch_http_exceptions=True`` and
+        it will catch the http exceptions.
+
+        Here a small example for the dispatch usage::
+
+            from werkzeug.wrappers import Request, Response
+            from werkzeug.wsgi import responder
+            from werkzeug.routing import Map, Rule
+
+            def on_index(request):
+                return Response('Hello from the index')
+
+            url_map = Map([Rule('/', endpoint='index')])
+            views = {'index': on_index}
+
+            @responder
+            def application(environ, start_response):
+                request = Request(environ)
+                urls = url_map.bind_to_environ(environ)
+                return urls.dispatch(lambda e, v: views[e](request, **v),
+                                     catch_http_exceptions=True)
+
+        Keep in mind that this method might return exception objects, too, so
+        use :class:`Response.force_type` to get a response object.
+
+        :param view_func: a function that is called with the endpoint as
+                          first argument and the value dict as second.  Has
+                          to dispatch to the actual view function with this
+                          information.  (see above)
+        :param path_info: the path info to use for matching.  Overrides the
+                          path info specified on binding.
+        :param method: the HTTP method used for matching.  Overrides the
+                       method specified on binding.
+        :param catch_http_exceptions: set to `True` to catch any of the
+                                      werkzeug :class:`HTTPException`\\s.
+        """
+        try:
+            try:
+                endpoint, args = self.match(path_info, method)
+            except RequestRedirect as e:
+                return e
+            return view_func(endpoint, args)
+        except HTTPException as e:
+            if catch_http_exceptions:
+                return e
+            raise
+
+    @typing.overload
+    def match(  # type: ignore
+        self,
+        path_info: t.Optional[str] = None,
+        method: t.Optional[str] = None,
+        return_rule: "te.Literal[False]" = False,
+        query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None,
+        websocket: t.Optional[bool] = None,
+    ) -> t.Tuple[str, t.Mapping[str, t.Any]]:
+        ...
+
+    @typing.overload
+    def match(
+        self,
+        path_info: t.Optional[str] = None,
+        method: t.Optional[str] = None,
+        return_rule: "te.Literal[True]" = True,
+        query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None,
+        websocket: t.Optional[bool] = None,
+    ) -> t.Tuple[Rule, t.Mapping[str, t.Any]]:
+        ...
+
+    def match(
+        self,
+        path_info: t.Optional[str] = None,
+        method: t.Optional[str] = None,
+        return_rule: bool = False,
+        query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None,
+        websocket: t.Optional[bool] = None,
+    ) -> t.Tuple[t.Union[str, Rule], t.Mapping[str, t.Any]]:
+        """The usage is simple: you just pass the match method the current
+        path info as well as the method (which defaults to `GET`).  The
+        following things can then happen:
+
+        - you receive a `NotFound` exception that indicates that no URL
+          matched.  A `NotFound` exception is also a WSGI application you
+          can call to get a default "page not found" page (it happens to
+          be the same object as `werkzeug.exceptions.NotFound`)
+
+        - you receive a `MethodNotAllowed` exception that indicates that there
+          is a match for this URL but not for the current request method.
+          This is useful for RESTful applications.
+
+        - you receive a `RequestRedirect` exception with a `new_url`
+          attribute.  This exception is used to notify you about a
+          redirect that Werkzeug requests from your WSGI application.
+          This is for example the case if you request ``/foo`` although
+          the correct URL is ``/foo/``.  You can use the `RequestRedirect`
+          instance as a response-like object, similar to all other
+          subclasses of `HTTPException`.
+
+        - you receive a ``WebsocketMismatch`` exception if the only
+          match is a WebSocket rule but the bind is an HTTP request, or
+          if the match is an HTTP rule but the bind is a WebSocket
+          request.
+
+        - you get a tuple in the form ``(endpoint, arguments)`` if there is
+          a match (unless `return_rule` is True, in which case you get a tuple
+          in the form ``(rule, arguments)``)
+
+        If the path info is not passed to the match method the default path
+        info of the map is used (defaults to the root URL if not defined
+        explicitly).
+
+        All of the exceptions raised are subclasses of `HTTPException` so they
+        can be used as WSGI responses. They will all render generic error or
+        redirect pages.
+
+        Here is a small example of matching:
+
+        >>> m = Map([
+        ...     Rule('/', endpoint='index'),
+        ...     Rule('/downloads/', endpoint='downloads/index'),
+        ...     Rule('/downloads/<int:id>', endpoint='downloads/show')
+        ... ])
+        >>> urls = m.bind("example.com", "/")
+        >>> urls.match("/", "GET")
+        ('index', {})
+        >>> urls.match("/downloads/42")
+        ('downloads/show', {'id': 42})
+
+        And here is what happens on redirect and missing URLs:
+
+        >>> urls.match("/downloads")
+        Traceback (most recent call last):
+          ...
+        RequestRedirect: http://example.com/downloads/
+        >>> urls.match("/missing")
+        Traceback (most recent call last):
+          ...
+        NotFound: 404 Not Found
+
+        :param path_info: the path info to use for matching.  Overrides the
+                          path info specified on binding.
+        :param method: the HTTP method used for matching.  Overrides the
+                       method specified on binding.
+        :param return_rule: return the rule that matched instead of just the
+                            endpoint (defaults to `False`).
+        :param query_args: optional query arguments that are used for
+                           automatic redirects, as a string or dictionary.
+                           It's currently not possible to use the query
+                           arguments for URL matching.
+        :param websocket: Match WebSocket instead of HTTP requests. A
+            websocket request has a ``ws`` or ``wss``
+            :attr:`url_scheme`. This overrides that detection.
+
+        .. versionadded:: 1.0
+            Added ``websocket``.
+
+        .. versionchanged:: 0.8
+            ``query_args`` can be a string.
+
+        .. versionadded:: 0.7
+            Added ``query_args``.
+
+        .. versionadded:: 0.6
+            Added ``return_rule``.
+        """
+        self.map.update()
+        if path_info is None:
+            path_info = self.path_info
+        else:
+            path_info = _to_str(path_info, self.map.charset)
+        if query_args is None:
+            query_args = self.query_args or {}
+        method = (method or self.default_method).upper()
+
+        if websocket is None:
+            websocket = self.websocket
+
+        require_redirect = False
+
+        domain_part = self.server_name if self.map.host_matching else self.subdomain
+        path_part = f"/{path_info.lstrip('/')}" if path_info else ""
+        path = f"{domain_part}|{path_part}"
+
+        have_match_for = set()
+        websocket_mismatch = False
+
+        for rule in self.map._rules:
+            try:
+                rv = rule.match(path, method)
+            except RequestPath as e:
+                raise RequestRedirect(
+                    self.make_redirect_url(
+                        url_quote(e.path_info, self.map.charset, safe="/:|+"),
+                        query_args,
+                    )
+                )
+            except RequestAliasRedirect as e:
+                raise RequestRedirect(
+                    self.make_alias_redirect_url(
+                        path, rule.endpoint, e.matched_values, method, query_args
+                    )
+                )
+            if rv is None:
+                continue
+            if rule.methods is not None and method not in rule.methods:
+                have_match_for.update(rule.methods)
+                continue
+
+            if rule.websocket != websocket:
+                websocket_mismatch = True
+                continue
+
+            if self.map.redirect_defaults:
+                redirect_url = self.get_default_redirect(rule, method, rv, query_args)
+                if redirect_url is not None:
+                    raise RequestRedirect(redirect_url)
+
+            if rule.redirect_to is not None:
+                if isinstance(rule.redirect_to, str):
+
+                    def _handle_match(match: t.Match[str]) -> str:
+                        value = rv[match.group(1)]  # type: ignore
+                        return rule._converters[match.group(1)].to_url(value)
+
+                    redirect_url = _simple_rule_re.sub(_handle_match, rule.redirect_to)
+                else:
+                    redirect_url = rule.redirect_to(self, **rv)
+
+                if self.subdomain:
+                    netloc = f"{self.subdomain}.{self.server_name}"
+                else:
+                    netloc = self.server_name
+
+                raise RequestRedirect(
+                    url_join(
+                        f"{self.url_scheme or 'http'}://{netloc}{self.script_name}",
+                        redirect_url,
+                    )
+                )
+
+            if require_redirect:
+                raise RequestRedirect(
+                    self.make_redirect_url(
+                        url_quote(path_info, self.map.charset, safe="/:|+"), query_args
+                    )
+                )
+
+            if return_rule:
+                return rule, rv
+            else:
+                return rule.endpoint, rv
+
+        if have_match_for:
+            raise MethodNotAllowed(valid_methods=list(have_match_for))
+
+        if websocket_mismatch:
+            raise WebsocketMismatch()
+
+        raise NotFound()
+
+    def test(
+        self, path_info: t.Optional[str] = None, method: t.Optional[str] = None
+    ) -> bool:
+        """Test if a rule would match.  Works like `match` but returns `True`
+        if the URL matches, or `False` if it does not exist.
+
+        :param path_info: the path info to use for matching.  Overrides the
+                          path info specified on binding.
+        :param method: the HTTP method used for matching.  Overrides the
+                       method specified on binding.
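+
+        An illustrative sketch:
+
+        >>> m = Map([Rule('/', endpoint='index')])
+        >>> urls = m.bind('example.com')
+        >>> urls.test('/')
+        True
+        >>> urls.test('/missing')
+        False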
+        """
+        try:
+            self.match(path_info, method)
+        except RequestRedirect:
+            pass
+        except HTTPException:
+            return False
+        return True
+
+    def allowed_methods(self, path_info: t.Optional[str] = None) -> t.Iterable[str]:
+        """Returns the valid methods that match for a given path.
+
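+        An illustrative sketch (``HEAD`` is added automatically because
+        ``GET`` is allowed):
+
+        >>> m = Map([Rule('/', endpoint='index', methods=['GET', 'POST'])])
+        >>> urls = m.bind('example.com')
+        >>> sorted(urls.allowed_methods('/'))
+        ['GET', 'HEAD', 'POST']
+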
+        .. versionadded:: 0.7
+        """
+        try:
+            self.match(path_info, method="--")
+        except MethodNotAllowed as e:
+            return e.valid_methods  # type: ignore
+        except HTTPException:
+            pass
+        return []
+
+    def get_host(self, domain_part: t.Optional[str]) -> str:
+        """Figures out the full host name for the given domain part.  The
+        domain part is a subdomain in case host matching is disabled or
+        a full host name.
+        """
+        if self.map.host_matching:
+            if domain_part is None:
+                return self.server_name
+            return _to_str(domain_part, "ascii")
+        subdomain = domain_part
+        if subdomain is None:
+            subdomain = self.subdomain
+        else:
+            subdomain = _to_str(subdomain, "ascii")
+
+        if subdomain:
+            return f"{subdomain}.{self.server_name}"
+        else:
+            return self.server_name
+
+    def get_default_redirect(
+        self,
+        rule: Rule,
+        method: str,
+        values: t.MutableMapping[str, t.Any],
+        query_args: t.Union[t.Mapping[str, t.Any], str],
+    ) -> t.Optional[str]:
+        """A helper that returns the URL to redirect to if it finds one.
+        This is used for default redirecting only.
+
+        :internal:
+        """
+        assert self.map.redirect_defaults
+        for r in self.map._rules_by_endpoint[rule.endpoint]:
+            # Every rule that comes after this one, including this rule
+            # itself, has a lower priority for the defaults.  We order
+            # the ones with the highest priority up for building.
+            if r is rule:
+                break
+            if r.provides_defaults_for(rule) and r.suitable_for(values, method):
+                values.update(r.defaults)  # type: ignore
+                domain_part, path = r.build(values)  # type: ignore
+                return self.make_redirect_url(path, query_args, domain_part=domain_part)
+        return None
+
+    def encode_query_args(self, query_args: t.Union[t.Mapping[str, t.Any], str]) -> str:
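+        """Encode the query arguments to a string using the map's
+        charset, if they are not a string already.
+
+        :internal:
+        """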
+        if not isinstance(query_args, str):
+            return url_encode(query_args, self.map.charset)
+        return query_args
+
+    def make_redirect_url(
+        self,
+        path_info: str,
+        query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None,
+        domain_part: t.Optional[str] = None,
+    ) -> str:
+        """Creates a redirect URL.
+
+        :internal:
+        """
+        if query_args:
+            suffix = f"?{self.encode_query_args(query_args)}"
+        else:
+            suffix = ""
+
+        scheme = self.url_scheme or "http"
+        host = self.get_host(domain_part)
+        path = posixpath.join(self.script_name.strip("/"), path_info.lstrip("/"))
+        return f"{scheme}://{host}/{path}{suffix}"
+
+    def make_alias_redirect_url(
+        self,
+        path: str,
+        endpoint: str,
+        values: t.Mapping[str, t.Any],
+        method: str,
+        query_args: t.Union[t.Mapping[str, t.Any], str],
+    ) -> str:
+        """Internally called to make an alias redirect URL."""
+        url = self.build(
+            endpoint, values, method, append_unknown=False, force_external=True
+        )
+        if query_args:
+            url += f"?{self.encode_query_args(query_args)}"
+        assert url != path, "detected invalid alias setting. No canonical URL found"
+        return url
+
+    def _partial_build(
+        self,
+        endpoint: str,
+        values: t.Mapping[str, t.Any],
+        method: t.Optional[str],
+        append_unknown: bool,
+    ) -> t.Optional[t.Tuple[str, str, bool]]:
+        """Helper for :meth:`build`.  Returns subdomain and path for the
+        rule that accepts this endpoint, values and method.
+
+        :internal:
+        """
+        # If the method is None, try the default method first.
+        if method is None:
+            rv = self._partial_build(
+                endpoint, values, self.default_method, append_unknown
+            )
+            if rv is not None:
+                return rv
+
+        # Default method did not match or a specific method is passed.
+        # Check all for first match with matching host. If no matching
+        # host is found, go with first result.
+        first_match = None
+
+        for rule in self.map._rules_by_endpoint.get(endpoint, ()):
+            if rule.suitable_for(values, method):
+                build_rv = rule.build(values, append_unknown)
+
+                if build_rv is not None:
+                    rv = (build_rv[0], build_rv[1], rule.websocket)
+                    if self.map.host_matching:
+                        if rv[0] == self.server_name:
+                            return rv
+                        elif first_match is None:
+                            first_match = rv
+                    else:
+                        return rv
+
+        return first_match
+
+    def build(
+        self,
+        endpoint: str,
+        values: t.Optional[t.Mapping[str, t.Any]] = None,
+        method: t.Optional[str] = None,
+        force_external: bool = False,
+        append_unknown: bool = True,
+        url_scheme: t.Optional[str] = None,
+    ) -> str:
+        """Building URLs works pretty much the other way round.  Instead of
+        `match` you call `build` and pass it the endpoint and a dict of
+        arguments for the placeholders.
+
+        The `build` function also accepts an argument called `force_external`
+        which, if set to `True`, will force external URLs.  By default,
+        external URLs (including the server name) will only be used if the
+        target URL is on a different subdomain.
+
+        >>> m = Map([
+        ...     Rule('/', endpoint='index'),
+        ...     Rule('/downloads/', endpoint='downloads/index'),
+        ...     Rule('/downloads/<int:id>', endpoint='downloads/show')
+        ... ])
+        >>> urls = m.bind("example.com", "/")
+        >>> urls.build("index", {})
+        '/'
+        >>> urls.build("downloads/show", {'id': 42})
+        '/downloads/42'
+        >>> urls.build("downloads/show", {'id': 42}, force_external=True)
+        'http://example.com/downloads/42'
+
+        Because URLs cannot contain non-ASCII data you will always get
+        an ASCII-only string back.  Non-ASCII characters are URL-encoded
+        with the charset defined on the map instance.
+
+        Additional values are converted to strings and appended to the URL as
+        URL querystring parameters:
+
+        >>> urls.build("index", {'q': 'My Searchstring'})
+        '/?q=My+Searchstring'
+
+        When processing those additional values, lists are furthermore
+        interpreted as multiple values (as per
+        :py:class:`werkzeug.datastructures.MultiDict`):
+
+        >>> urls.build("index", {'q': ['a', 'b', 'c']})
+        '/?q=a&q=b&q=c'
+
+        Passing a ``MultiDict`` will also add multiple values:
+
+        >>> urls.build("index", MultiDict((('p', 'z'), ('q', 'a'), ('q', 'b'))))
+        '/?p=z&q=a&q=b'
+
+        If a rule does not exist when building, a `BuildError` exception is
+        raised.
+
+        The build method accepts an argument called `method` which allows
+        you to specify the method you want to have a URL built for if you
+        have different methods specified for the same endpoint.
+
+        :param endpoint: the endpoint of the URL to build.
+        :param values: the values for the URL to build.  Unhandled values are
+                       appended to the URL as query parameters.
+        :param method: the HTTP method for the rule if there are different
+                       URLs for different methods on the same endpoint.
+        :param force_external: enforce full canonical external URLs. If the URL
+                               scheme is not provided, this will generate
+                               a protocol-relative URL.
+        :param append_unknown: unknown parameters are appended to the generated
+                               URL as query string argument.  Disable this
+                               if you want the builder to ignore those.
+        :param url_scheme: Scheme to use in place of the bound
+            :attr:`url_scheme`.
+
+        .. versionchanged:: 2.0
+            Added the ``url_scheme`` parameter.
+
+        .. versionadded:: 0.6
+           Added the ``append_unknown`` parameter.
+        """
+        self.map.update()
+
+        if values:
+            temp_values: t.Dict[str, t.Union[t.List[t.Any], t.Any]] = {}
+            always_list = isinstance(values, MultiDict)
+            key: str
+            value: t.Optional[t.Union[t.List[t.Any], t.Any]]
+
+            # For MultiDict, dict.items(values) is like values.lists()
+            # without the call or list coercion overhead.
+            for key, value in dict.items(values):  # type: ignore
+                if value is None:
+                    continue
+
+                if always_list or isinstance(value, (list, tuple)):
+                    value = [v for v in value if v is not None]
+
+                    if not value:
+                        continue
+
+                    if len(value) == 1:
+                        value = value[0]
+
+                temp_values[key] = value
+
+            values = temp_values
+        else:
+            values = {}
+
+        rv = self._partial_build(endpoint, values, method, append_unknown)
+        if rv is None:
+            raise BuildError(endpoint, values, method, self)
+
+        domain_part, path, websocket = rv
+        host = self.get_host(domain_part)
+
+        if url_scheme is None:
+            url_scheme = self.url_scheme
+
+        # Always build WebSocket routes with the scheme (browsers
+        # require full URLs). If bound to a WebSocket, ensure that HTTP
+        # routes are built with an HTTP scheme.
+        secure = url_scheme in {"https", "wss"}
+
+        if websocket:
+            force_external = True
+            url_scheme = "wss" if secure else "ws"
+        elif url_scheme:
+            url_scheme = "https" if secure else "http"
+
+        # Shortcut: for internal URLs (same host or subdomain) that are
+        # not forced external, return a path relative to the root.
+        if not force_external and (
+            (self.map.host_matching and host == self.server_name)
+            or (not self.map.host_matching and domain_part == self.subdomain)
+        ):
+            return f"{self.script_name.rstrip('/')}/{path.lstrip('/')}"
+
+        scheme = f"{url_scheme}:" if url_scheme else ""
+        return f"{scheme}//{host}{self.script_name[:-1]}/{path.lstrip('/')}"
diff --git a/venv/lib/python3.7/site-packages/werkzeug/sansio/__init__.py b/venv/lib/python3.7/site-packages/werkzeug/sansio/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/werkzeug/sansio/multipart.py b/venv/lib/python3.7/site-packages/werkzeug/sansio/multipart.py
new file mode 100644
index 00000000..bb8ab345
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/sansio/multipart.py
@@ -0,0 +1,260 @@
+import re
+from dataclasses import dataclass
+from enum import auto
+from enum import Enum
+from typing import cast
+from typing import List
+from typing import Optional
+from typing import Tuple
+
+from .._internal import _to_bytes
+from .._internal import _to_str
+from ..datastructures import Headers
+from ..exceptions import RequestEntityTooLarge
+from ..http import parse_options_header
+
+
+class Event:
+    pass
+
+
+@dataclass(frozen=True)
+class Preamble(Event):
+    data: bytes
+
+
+@dataclass(frozen=True)
+class Field(Event):
+    name: str
+    headers: Headers
+
+
+@dataclass(frozen=True)
+class File(Event):
+    name: str
+    filename: str
+    headers: Headers
+
+
+@dataclass(frozen=True)
+class Data(Event):
+    data: bytes
+    more_data: bool
+
+
+@dataclass(frozen=True)
+class Epilogue(Event):
+    data: bytes
+
+
+class NeedData(Event):
+    pass
+
+
+NEED_DATA = NeedData()
+
+
+class State(Enum):
+    PREAMBLE = auto()
+    PART = auto()
+    DATA = auto()
+    EPILOGUE = auto()
+    COMPLETE = auto()
+
+
+# Multipart line breaks MUST be CRLF (\r\n) by RFC-7578, except that
+# many implementations break this and either use CR or LF alone.
+LINE_BREAK = b"(?:\r\n|\n|\r)"
+BLANK_LINE_RE = re.compile(b"(?:\r\n\r\n|\r\r|\n\n)", re.MULTILINE)
+LINE_BREAK_RE = re.compile(LINE_BREAK, re.MULTILINE)
+# Header values can be continued via a space or tab after the linebreak, as
+# per RFC2231
+HEADER_CONTINUATION_RE = re.compile(b"%s[ \t]" % LINE_BREAK, re.MULTILINE)
+
+
+class MultipartDecoder:
+    """Decodes a multipart message as bytes into Python events.
+
+    The part data is returned as available to allow the caller to save
+    the data from memory to disk, if desired.
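+
+    A minimal usage sketch (``chunk`` is an illustrative bytes object)::
+
+        decoder = MultipartDecoder(b"boundary")
+        decoder.receive_data(chunk)   # feed raw bytes as they arrive
+        event = decoder.next_event()  # Preamble, Field/File, Data,
+                                      # Epilogue, or NEED_DATA
+        decoder.receive_data(None)    # signal end of input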
+    """
+
+    def __init__(
+        self,
+        boundary: bytes,
+        max_form_memory_size: Optional[int] = None,
+    ) -> None:
+        self.buffer = bytearray()
+        self.complete = False
+        self.max_form_memory_size = max_form_memory_size
+        self.state = State.PREAMBLE
+        self.boundary = boundary
+
+        # Note: in the regexes below, horizontal whitespace (\h) is
+        # written as [^\S\n\r] because \h isn't supported in Python.
+
+        # The preamble must end with a boundary where the boundary is
+        # prefixed by a line break (RFC 2046), except that many
+        # implementations, including Werkzeug's tests, omit the line
+        # break prefix.  In addition the first boundary could be the
+        # epilogue boundary (for empty form-data), hence the matching
+        # group used to detect whether it is an epilogue boundary.
+        self.preamble_re = re.compile(
+            br"%s?--%s(--[^\S\n\r]*%s?|[^\S\n\r]*%s)"
+            % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK),
+            re.MULTILINE,
+        )
+        # A boundary must include a line break prefix and suffix, and
+        # may include trailing whitespace.  In addition the boundary
+        # could be the epilogue boundary, hence the matching group used
+        # to detect whether it is an epilogue boundary.
+        self.boundary_re = re.compile(
+            br"%s--%s(--[^\S\n\r]*%s?|[^\S\n\r]*%s)"
+            % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK),
+            re.MULTILINE,
+        )
+
+    def last_newline(self) -> int:
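+        # Return the index of the earlier of the last CR and the last LF
+        # in the buffer (or the buffer length if one is missing) so that
+        # callers can avoid emitting a potentially partial boundary.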
+        try:
+            last_nl = self.buffer.rindex(b"\n")
+        except ValueError:
+            last_nl = len(self.buffer)
+        try:
+            last_cr = self.buffer.rindex(b"\r")
+        except ValueError:
+            last_cr = len(self.buffer)
+
+        return min(last_nl, last_cr)
+
+    def receive_data(self, data: Optional[bytes]) -> None:
+        if data is None:
+            self.complete = True
+        elif (
+            self.max_form_memory_size is not None
+            and len(self.buffer) + len(data) > self.max_form_memory_size
+        ):
+            raise RequestEntityTooLarge()
+        else:
+            self.buffer.extend(data)
+
+    def next_event(self) -> Event:
+        event: Event = NEED_DATA
+
+        if self.state == State.PREAMBLE:
+            match = self.preamble_re.search(self.buffer)
+            if match is not None:
+                if match.group(1).startswith(b"--"):
+                    self.state = State.EPILOGUE
+                else:
+                    self.state = State.PART
+                data = bytes(self.buffer[: match.start()])
+                del self.buffer[: match.end()]
+                event = Preamble(data=data)
+
+        elif self.state == State.PART:
+            match = BLANK_LINE_RE.search(self.buffer)
+            if match is not None:
+                headers = self._parse_headers(self.buffer[: match.start()])
+                del self.buffer[: match.end()]
+
+                if "content-disposition" not in headers:
+                    raise ValueError("Missing Content-Disposition header")
+
+                disposition, extra = parse_options_header(
+                    headers["content-disposition"]
+                )
+                name = cast(str, extra.get("name"))
+                filename = extra.get("filename")
+                if filename is not None:
+                    event = File(
+                        filename=filename,
+                        headers=headers,
+                        name=name,
+                    )
+                else:
+                    event = Field(
+                        headers=headers,
+                        name=name,
+                    )
+                self.state = State.DATA
+
+        elif self.state == State.DATA:
+            if self.buffer.find(b"--" + self.boundary) == -1:
+                # No complete boundary in the buffer, but there may be
+                # a partial boundary at the end.  As the boundary
+                # starts with either a NL or CR, find the earliest one
+                # and return the data up to that point.
+                data_length = del_index = self.last_newline()
+                more_data = True
+            else:
+                match = self.boundary_re.search(self.buffer)
+                if match is not None:
+                    if match.group(1).startswith(b"--"):
+                        self.state = State.EPILOGUE
+                    else:
+                        self.state = State.PART
+                    data_length = match.start()
+                    del_index = match.end()
+                else:
+                    data_length = del_index = self.last_newline()
+                more_data = match is None
+
+            data = bytes(self.buffer[:data_length])
+            del self.buffer[:del_index]
+            if data or not more_data:
+                event = Data(data=data, more_data=more_data)
+
+        elif self.state == State.EPILOGUE and self.complete:
+            event = Epilogue(data=bytes(self.buffer))
+            del self.buffer[:]
+            self.state = State.COMPLETE
+
+        if self.complete and isinstance(event, NeedData):
+            raise ValueError(f"Invalid form-data cannot parse beyond {self.state}")
+
+        return event
+
+    def _parse_headers(self, data: bytes) -> Headers:
+        headers: List[Tuple[str, str]] = []
+        # Merge the continued headers into one line
+        data = HEADER_CONTINUATION_RE.sub(b" ", data)
+        # Now there is one header per line
+        for line in data.splitlines():
+            if line.strip() != b"":
+                name, value = _to_str(line).strip().split(":", 1)
+                headers.append((name.strip(), value.strip()))
+        return Headers(headers)
+
+
+class MultipartEncoder:
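+    """Encodes Python events into a multipart message as bytes.
+
+    The counterpart to :class:`MultipartDecoder`: pass events to
+    :meth:`send_event` and concatenate the returned bytes.
+    """
+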
+    def __init__(self, boundary: bytes) -> None:
+        self.boundary = boundary
+        self.state = State.PREAMBLE
+
+    def send_event(self, event: Event) -> bytes:
+        if isinstance(event, Preamble) and self.state == State.PREAMBLE:
+            self.state = State.PART
+            return event.data
+        elif isinstance(event, (Field, File)) and self.state in {
+            State.PREAMBLE,
+            State.PART,
+            State.DATA,
+        }:
+            self.state = State.DATA
+            data = b"\r\n--" + self.boundary + b"\r\n"
+            data += b'Content-Disposition: form-data; name="%s"' % _to_bytes(event.name)
+            if isinstance(event, File):
+                data += b'; filename="%s"' % _to_bytes(event.filename)
+            data += b"\r\n"
+            for name, value in cast(Field, event).headers:
+                if name.lower() != "content-disposition":
+                    data += _to_bytes(f"{name}: {value}\r\n")
+            data += b"\r\n"
+            return data
+        elif isinstance(event, Data) and self.state == State.DATA:
+            return event.data
+        elif isinstance(event, Epilogue):
+            self.state = State.COMPLETE
+            return b"\r\n--" + self.boundary + b"--\r\n" + event.data
+        else:
+            raise ValueError(f"Cannot generate {event} in state: {self.state}")
diff --git a/venv/lib/python3.7/site-packages/werkzeug/sansio/request.py b/venv/lib/python3.7/site-packages/werkzeug/sansio/request.py
new file mode 100644
index 00000000..2c21a213
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/sansio/request.py
@@ -0,0 +1,548 @@
+import typing as t
+from datetime import datetime
+
+from .._internal import _to_str
+from ..datastructures import Accept
+from ..datastructures import Authorization
+from ..datastructures import CharsetAccept
+from ..datastructures import ETags
+from ..datastructures import Headers
+from ..datastructures import HeaderSet
+from ..datastructures import IfRange
+from ..datastructures import ImmutableList
+from ..datastructures import ImmutableMultiDict
+from ..datastructures import LanguageAccept
+from ..datastructures import MIMEAccept
+from ..datastructures import MultiDict
+from ..datastructures import Range
+from ..datastructures import RequestCacheControl
+from ..http import parse_accept_header
+from ..http import parse_authorization_header
+from ..http import parse_cache_control_header
+from ..http import parse_cookie
+from ..http import parse_date
+from ..http import parse_etags
+from ..http import parse_if_range_header
+from ..http import parse_list_header
+from ..http import parse_options_header
+from ..http import parse_range_header
+from ..http import parse_set_header
+from ..urls import url_decode
+from ..user_agent import UserAgent
+from ..useragents import _UserAgent as _DeprecatedUserAgent
+from ..utils import cached_property
+from ..utils import header_property
+from .utils import get_current_url
+from .utils import get_host
+
+
+class Request:
+    """Represents the non-IO parts of a HTTP request, including the
+    method, URL info, and headers.
+
+    This class is not meant for general use. It should only be used when
+    implementing WSGI, ASGI, or another HTTP application spec. Werkzeug
+    provides a WSGI implementation at :cls:`werkzeug.wrappers.Request`.
+
+    :param method: The method the request was made with, such as
+        ``GET``.
+    :param scheme: The URL scheme of the protocol the request used, such
+        as ``https`` or ``wss``.
+    :param server: The address of the server. ``(host, port)``,
+        ``(path, None)`` for unix sockets, or ``None`` if not known.
+    :param root_path: The prefix that the application is mounted under.
+        This is prepended to generated URLs, but is not part of route
+        matching.
+    :param path: The path part of the URL after ``root_path``.
+    :param query_string: The part of the URL after the "?".
+    :param headers: The headers received with the request.
+    :param remote_addr: The address of the client sending the request.
+
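+    A minimal construction sketch (all values illustrative):
+
+    >>> from werkzeug.datastructures import Headers
+    >>> r = Request(
+    ...     "get", "http", ("localhost", 80), "", "/info",
+    ...     b"lang=en", Headers({"Host": "localhost"}), None,
+    ... )
+    >>> r.method
+    'GET'
+    >>> r.args["lang"]
+    'en'
+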
+    .. versionadded:: 2.0
+    """
+
+    #: The charset used to decode most data in the request.
+    charset = "utf-8"
+
+    #: The error handling procedure for decoding errors, defaults to 'replace'
+    encoding_errors = "replace"
+
+    #: The class to use for `args` and `form`.  The default is an
+    #: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports
+    #: multiple values per key.  Alternatively it makes sense to use an
+    #: :class:`~werkzeug.datastructures.ImmutableOrderedMultiDict` which
+    #: preserves order, or a :class:`~werkzeug.datastructures.ImmutableDict`
+    #: which is the fastest but only remembers the last key.  It is also
+    #: possible to use mutable structures, but this is not recommended.
+    #:
+    #: .. versionadded:: 0.6
+    parameter_storage_class: t.Type[MultiDict] = ImmutableMultiDict
+
+    #: The type to be used for dict values from the incoming WSGI
+    #: environment. (For example for :attr:`cookies`.) By default an
+    #: :class:`~werkzeug.datastructures.ImmutableMultiDict` is used.
+    #:
+    #: .. versionchanged:: 1.0.0
+    #:     Changed to ``ImmutableMultiDict`` to support multiple values.
+    #:
+    #: .. versionadded:: 0.6
+    dict_storage_class: t.Type[MultiDict] = ImmutableMultiDict
+
+    #: The type to be used for list values from the incoming WSGI environment.
+    #: By default an :class:`~werkzeug.datastructures.ImmutableList` is used
+    #: (for example for :attr:`access_route`).
+    #:
+    #: .. versionadded:: 0.6
+    list_storage_class: t.Type[t.List] = ImmutableList
+
+    user_agent_class = _DeprecatedUserAgent
+    """The class used and returned by the :attr:`user_agent` property to
+    parse the header. Defaults to
+    :class:`~werkzeug.user_agent.UserAgent`, which does no parsing. An
+    extension can provide a subclass that uses a parser to provide other
+    data.
+
+    .. versionadded:: 2.0
+    """
+
+    #: Valid host names when handling requests. By default all hosts are
+    #: trusted, which means that whatever the client says the host is
+    #: will be accepted.
+    #:
+    #: Because ``Host`` and ``X-Forwarded-Host`` headers can be set to
+    #: any value by a malicious client, it is recommended to either set
+    #: this property or implement similar validation in the proxy (if
+    #: the application is being run behind one).
+    #:
+    #: .. versionadded:: 0.9
+    trusted_hosts: t.Optional[t.List[str]] = None
+
+    def __init__(
+        self,
+        method: str,
+        scheme: str,
+        server: t.Optional[t.Tuple[str, t.Optional[int]]],
+        root_path: str,
+        path: str,
+        query_string: bytes,
+        headers: Headers,
+        remote_addr: t.Optional[str],
+    ) -> None:
+        #: The method the request was made with, such as ``GET``.
+        self.method = method.upper()
+        #: The URL scheme of the protocol the request used, such as
+        #: ``https`` or ``wss``.
+        self.scheme = scheme
+        #: The address of the server. ``(host, port)``, ``(path, None)``
+        #: for unix sockets, or ``None`` if not known.
+        self.server = server
+        #: The prefix that the application is mounted under, without a
+        #: trailing slash. :attr:`path` comes after this.
+        self.root_path = root_path.rstrip("/")
+        #: The path part of the URL after :attr:`root_path`. This is the
+        #: path used for routing within the application.
+        self.path = "/" + path.lstrip("/")
+        #: The part of the URL after the "?". This is the raw value, use
+        #: :attr:`args` for the parsed values.
+        self.query_string = query_string
+        #: The headers received with the request.
+        self.headers = headers
+        #: The address of the client sending the request.
+        self.remote_addr = remote_addr
+
+    def __repr__(self) -> str:
+        try:
+            url = self.url
+        except Exception as e:
+            url = f"(invalid URL: {e})"
+
+        return f"<{type(self).__name__} {url!r} [{self.method}]>"
+
+    @property
+    def url_charset(self) -> str:
+        """The charset that is assumed for URLs. Defaults to the value
+        of :attr:`charset`.
+
+        .. versionadded:: 0.6
+        """
+        return self.charset
+
+    @cached_property
+    def args(self) -> "MultiDict[str, str]":
+        """The parsed URL parameters (the part in the URL after the question
+        mark).
+
+        By default an
+        :class:`~werkzeug.datastructures.ImmutableMultiDict`
+        is returned from this function.  This can be changed by setting
+        :attr:`parameter_storage_class` to a different type.  This might
+        be necessary if the order of the form data is important.
+        """
+        return url_decode(
+            self.query_string,
+            self.url_charset,
+            errors=self.encoding_errors,
+            cls=self.parameter_storage_class,
+        )
+
+    @cached_property
+    def access_route(self) -> t.List[str]:
+        """If a forwarded header exists this is a list of all ip addresses
+        from the client ip to the last proxy server.
+        """
+        if "X-Forwarded-For" in self.headers:
+            return self.list_storage_class(
+                parse_list_header(self.headers["X-Forwarded-For"])
+            )
+        elif self.remote_addr is not None:
+            return self.list_storage_class([self.remote_addr])
+        return self.list_storage_class()
+
+    @cached_property
+    def full_path(self) -> str:
+        """Requested path, including the query string."""
+        return f"{self.path}?{_to_str(self.query_string, self.url_charset)}"
+
+    @property
+    def is_secure(self) -> bool:
+        """``True`` if the request was made with a secure protocol
+        (HTTPS or WSS).
+        """
+        return self.scheme in {"https", "wss"}
+
+    @cached_property
+    def url(self) -> str:
+        """The full request URL with the scheme, host, root path, path,
+        and query string."""
+        return get_current_url(
+            self.scheme, self.host, self.root_path, self.path, self.query_string
+        )
+
+    @cached_property
+    def base_url(self) -> str:
+        """Like :attr:`url` but without the query string."""
+        return get_current_url(self.scheme, self.host, self.root_path, self.path)
+
+    @cached_property
+    def root_url(self) -> str:
+        """The request URL scheme, host, and root path. This is the root
+        that the application is accessed from.
+        """
+        return get_current_url(self.scheme, self.host, self.root_path)
+
+    @cached_property
+    def host_url(self) -> str:
+        """The request URL scheme and host only."""
+        return get_current_url(self.scheme, self.host)
+
+    @cached_property
+    def host(self) -> str:
+        """The host name the request was made to, including the port if
+        it's non-standard. Validated with :attr:`trusted_hosts`.
+        """
+        return get_host(
+            self.scheme, self.headers.get("host"), self.server, self.trusted_hosts
+        )
+
+    @cached_property
+    def cookies(self) -> "ImmutableMultiDict[str, str]":
+        """A :class:`dict` with the contents of all cookies transmitted with
+        the request."""
+        wsgi_combined_cookie = ";".join(self.headers.getlist("Cookie"))
+        return parse_cookie(  # type: ignore
+            wsgi_combined_cookie,
+            self.charset,
+            self.encoding_errors,
+            cls=self.dict_storage_class,
+        )
+
+    # Common Descriptors
+
+    content_type = header_property[str](
+        "Content-Type",
+        doc="""The Content-Type entity-header field indicates the media
+        type of the entity-body sent to the recipient or, in the case of
+        the HEAD method, the media type that would have been sent had
+        the request been a GET.""",
+        read_only=True,
+    )
+
+    @cached_property
+    def content_length(self) -> t.Optional[int]:
+        """The Content-Length entity-header field indicates the size of the
+        entity-body in bytes or, in the case of the HEAD method, the size of
+        the entity-body that would have been sent had the request been a
+        GET.
+        """
+        if self.headers.get("Transfer-Encoding", "") == "chunked":
+            return None
+
+        content_length = self.headers.get("Content-Length")
+        if content_length is not None:
+            try:
+                return max(0, int(content_length))
+            except (ValueError, TypeError):
+                pass
+
+        return None
+
+    content_encoding = header_property[str](
+        "Content-Encoding",
+        doc="""The Content-Encoding entity-header field is used as a
+        modifier to the media-type. When present, its value indicates
+        what additional content codings have been applied to the
+        entity-body, and thus what decoding mechanisms must be applied
+        in order to obtain the media-type referenced by the Content-Type
+        header field.
+
+        .. versionadded:: 0.9""",
+        read_only=True,
+    )
+    content_md5 = header_property[str](
+        "Content-MD5",
+        doc="""The Content-MD5 entity-header field, as defined in
+        RFC 1864, is an MD5 digest of the entity-body for the purpose of
+        providing an end-to-end message integrity check (MIC) of the
+        entity-body. (Note: a MIC is good for detecting accidental
+        modification of the entity-body in transit, but is not proof
+        against malicious attacks.)
+
+        .. versionadded:: 0.9""",
+        read_only=True,
+    )
+    referrer = header_property[str](
+        "Referer",
+        doc="""The Referer[sic] request-header field allows the client
+        to specify, for the server's benefit, the address (URI) of the
+        resource from which the Request-URI was obtained (the
+        "referrer", although the header field is misspelled).""",
+        read_only=True,
+    )
+    date = header_property(
+        "Date",
+        None,
+        parse_date,
+        doc="""The Date general-header field represents the date and
+        time at which the message was originated, having the same
+        semantics as orig-date in RFC 822.
+
+        .. versionchanged:: 2.0
+            The datetime object is timezone-aware.
+        """,
+        read_only=True,
+    )
+    max_forwards = header_property(
+        "Max-Forwards",
+        None,
+        int,
+        doc="""The Max-Forwards request-header field provides a
+        mechanism with the TRACE and OPTIONS methods to limit the number
+        of proxies or gateways that can forward the request to the next
+        inbound server.""",
+        read_only=True,
+    )
+
+    def _parse_content_type(self) -> None:
+        if not hasattr(self, "_parsed_content_type"):
+            self._parsed_content_type = parse_options_header(
+                self.headers.get("Content-Type", "")
+            )
+
+    @property
+    def mimetype(self) -> str:
+        """Like :attr:`content_type`, but without parameters (eg, without
+        charset, type etc.) and always lowercase.  For example if the content
+        type is ``text/HTML; charset=utf-8`` the mimetype would be
+        ``'text/html'``.
+        """
+        self._parse_content_type()
+        return self._parsed_content_type[0].lower()
+
+    @property
+    def mimetype_params(self) -> t.Dict[str, str]:
+        """The mimetype parameters as dict.  For example if the content
+        type is ``text/html; charset=utf-8`` the params would be
+        ``{'charset': 'utf-8'}``.
+        """
+        self._parse_content_type()
+        return self._parsed_content_type[1]
+
+    @cached_property
+    def pragma(self) -> HeaderSet:
+        """The Pragma general-header field is used to include
+        implementation-specific directives that might apply to any recipient
+        along the request/response chain.  All pragma directives specify
+        optional behavior from the viewpoint of the protocol; however, some
+        systems MAY require that behavior be consistent with the directives.
+        """
+        return parse_set_header(self.headers.get("Pragma", ""))
+
+    # Accept
+
+    @cached_property
+    def accept_mimetypes(self) -> MIMEAccept:
+        """List of mimetypes this client supports as
+        :class:`~werkzeug.datastructures.MIMEAccept` object.
+        """
+        return parse_accept_header(self.headers.get("Accept"), MIMEAccept)
+
+    @cached_property
+    def accept_charsets(self) -> CharsetAccept:
+        """List of charsets this client supports as
+        :class:`~werkzeug.datastructures.CharsetAccept` object.
+        """
+        return parse_accept_header(self.headers.get("Accept-Charset"), CharsetAccept)
+
+    @cached_property
+    def accept_encodings(self) -> Accept:
+        """List of encodings this client accepts.  Encodings in a HTTP term
+        are compression encodings such as gzip.  For charsets have a look at
+        :attr:`accept_charset`.
+        """
+        return parse_accept_header(self.headers.get("Accept-Encoding"))
+
+    @cached_property
+    def accept_languages(self) -> LanguageAccept:
+        """List of languages this client accepts as
+        :class:`~werkzeug.datastructures.LanguageAccept` object.
+
+        .. versionchanged:: 0.5
+           In previous versions this was a regular
+           :class:`~werkzeug.datastructures.Accept` object.
+        """
+        return parse_accept_header(self.headers.get("Accept-Language"), LanguageAccept)
+
+    # ETag
+
+    @cached_property
+    def cache_control(self) -> RequestCacheControl:
+        """A :class:`~werkzeug.datastructures.RequestCacheControl` object
+        for the incoming cache control headers.
+        """
+        cache_control = self.headers.get("Cache-Control")
+        return parse_cache_control_header(cache_control, None, RequestCacheControl)
+
+    @cached_property
+    def if_match(self) -> ETags:
+        """An object containing all the etags in the `If-Match` header.
+
+        :rtype: :class:`~werkzeug.datastructures.ETags`
+        """
+        return parse_etags(self.headers.get("If-Match"))
+
+    @cached_property
+    def if_none_match(self) -> ETags:
+        """An object containing all the etags in the `If-None-Match` header.
+
+        :rtype: :class:`~werkzeug.datastructures.ETags`
+        """
+        return parse_etags(self.headers.get("If-None-Match"))
+
+    @cached_property
+    def if_modified_since(self) -> t.Optional[datetime]:
+        """The parsed `If-Modified-Since` header as a datetime object.
+
+        .. versionchanged:: 2.0
+            The datetime object is timezone-aware.
+        """
+        return parse_date(self.headers.get("If-Modified-Since"))
+
+    @cached_property
+    def if_unmodified_since(self) -> t.Optional[datetime]:
+        """The parsed `If-Unmodified-Since` header as a datetime object.
+
+        .. versionchanged:: 2.0
+            The datetime object is timezone-aware.
+        """
+        return parse_date(self.headers.get("If-Unmodified-Since"))
+
+    @cached_property
+    def if_range(self) -> IfRange:
+        """The parsed ``If-Range`` header.
+
+        .. versionchanged:: 2.0
+            ``IfRange.date`` is timezone-aware.
+
+        .. versionadded:: 0.7
+        """
+        return parse_if_range_header(self.headers.get("If-Range"))
+
+    @cached_property
+    def range(self) -> t.Optional[Range]:
+        """The parsed `Range` header.
+
+        .. versionadded:: 0.7
+
+        :rtype: :class:`~werkzeug.datastructures.Range`
+        """
+        return parse_range_header(self.headers.get("Range"))
+
+    # User Agent
+
+    @cached_property
+    def user_agent(self) -> UserAgent:
+        """The user agent. Use ``user_agent.string`` to get the header
+        value. Set :attr:`user_agent_class` to a subclass of
+        :class:`~werkzeug.user_agent.UserAgent` to provide parsing for
+        the other properties or other extended data.
+
+        .. versionchanged:: 2.0
+            The built-in parser is deprecated and will be removed in
+            Werkzeug 2.1. A ``UserAgent`` subclass must be set to parse
+            data from the string.
+        """
+        return self.user_agent_class(self.headers.get("User-Agent", ""))
+
+    # Authorization
+
+    @cached_property
+    def authorization(self) -> t.Optional[Authorization]:
+        """The `Authorization` object in parsed form."""
+        return parse_authorization_header(self.headers.get("Authorization"))
+
+    # CORS
+
+    origin = header_property[str](
+        "Origin",
+        doc=(
+            "The host that the request originated from. Set"
+            " :attr:`~CORSResponseMixin.access_control_allow_origin` on"
+            " the response to indicate which origins are allowed."
+        ),
+        read_only=True,
+    )
+
+    access_control_request_headers = header_property(
+        "Access-Control-Request-Headers",
+        load_func=parse_set_header,
+        doc=(
+            "Sent with a preflight request to indicate which headers"
+            " will be sent with the cross origin request. Set"
+            " :attr:`~CORSResponseMixin.access_control_allow_headers`"
+            " on the response to indicate which headers are allowed."
+        ),
+        read_only=True,
+    )
+
+    access_control_request_method = header_property[str](
+        "Access-Control-Request-Method",
+        doc=(
+            "Sent with a preflight request to indicate which method"
+            " will be used for the cross origin request. Set"
+            " :attr:`~CORSResponseMixin.access_control_allow_methods`"
+            " on the response to indicate which methods are allowed."
+        ),
+        read_only=True,
+    )
+
+    @property
+    def is_json(self) -> bool:
+        """Check if the mimetype indicates JSON data, either
+        :mimetype:`application/json` or :mimetype:`application/*+json`.
+        """
+        mt = self.mimetype
+        return mt == "application/json" or (
+            mt.startswith("application/") and mt.endswith("+json")
+        )
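+
+    # E.g. (a sketch): "application/json" and "application/vnd.api+json" both
+    # count as JSON here; "text/json" does not.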
diff --git a/venv/lib/python3.7/site-packages/werkzeug/sansio/response.py b/venv/lib/python3.7/site-packages/werkzeug/sansio/response.py
new file mode 100644
index 00000000..aedfcb04
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/sansio/response.py
@@ -0,0 +1,656 @@
+import typing as t
+from datetime import datetime
+from datetime import timedelta
+from datetime import timezone
+from http import HTTPStatus
+
+from .._internal import _to_str
+from ..datastructures import CallbackDict
+from ..datastructures import ContentRange
+from ..datastructures import Headers
+from ..datastructures import HeaderSet
+from ..datastructures import ResponseCacheControl
+from ..datastructures import WWWAuthenticate
+from ..http import COEP
+from ..http import COOP
+from ..http import dump_age
+from ..http import dump_cookie
+from ..http import dump_csp_header
+from ..http import dump_header
+from ..http import dump_options_header
+from ..http import http_date
+from ..http import HTTP_STATUS_CODES
+from ..http import parse_age
+from ..http import parse_cache_control_header
+from ..http import parse_content_range_header
+from ..http import parse_csp_header
+from ..http import parse_date
+from ..http import parse_options_header
+from ..http import parse_set_header
+from ..http import parse_www_authenticate_header
+from ..http import quote_etag
+from ..http import unquote_etag
+from ..utils import get_content_type
+from ..utils import header_property
+
+
+def _set_property(name: str, doc: t.Optional[str] = None) -> property:
+    def fget(self: "Response") -> HeaderSet:
+        def on_update(header_set: HeaderSet) -> None:
+            if not header_set and name in self.headers:
+                del self.headers[name]
+            elif header_set:
+                self.headers[name] = header_set.to_header()
+
+        return parse_set_header(self.headers.get(name), on_update)
+
+    def fset(
+        self: "Response",
+        value: t.Optional[
+            t.Union[str, t.Dict[str, t.Union[str, int]], t.Iterable[str]]
+        ],
+    ) -> None:
+        if not value:
+            del self.headers[name]
+        elif isinstance(value, str):
+            self.headers[name] = value
+        else:
+            self.headers[name] = dump_header(value)
+
+    return property(fget, fset, doc=doc)
+
+
+class Response:
+    """Represents the non-IO parts of an HTTP response, specifically the
+    status and headers but not the body.
+
+    This class is not meant for general use. It should only be used when
+    implementing WSGI, ASGI, or another HTTP application spec. Werkzeug
+    provides a WSGI implementation at :class:`werkzeug.wrappers.Response`.
+
+    :param status: The status code for the response. Either an int, in
+        which case the default status message is added, or a string in
+        the form ``{code} {message}``, like ``404 Not Found``. Defaults
+        to 200.
+    :param headers: A :class:`~werkzeug.datastructures.Headers` object,
+        or a list of ``(key, value)`` tuples that will be converted to a
+        ``Headers`` object.
+    :param mimetype: The mime type (content type without charset or
+        other parameters) of the response. If the value starts with
+        ``text/`` (or matches some other special cases), the charset
+        will be added to create the ``content_type``.
+    :param content_type: The full content type of the response.
+        Overrides building the value from ``mimetype``.
+
+    .. versionadded:: 2.0
+    """
+
+    #: the charset of the response.
+    charset = "utf-8"
+
+    #: the default status if none is provided.
+    default_status = 200
+
+    #: the default mimetype if none is provided.
+    default_mimetype = "text/plain"
+
+    #: Warn if a cookie header exceeds this size. The default, 4093, should be
+    #: safely `supported by most browsers <cookie_>`_. A cookie larger than
+    #: this size will still be sent, but it may be ignored or handled
+    #: incorrectly by some browsers. Set to 0 to disable this check.
+    #:
+    #: .. versionadded:: 0.13
+    #:
+    #: .. _`cookie`: http://browsercookielimits.squawky.net/
+    max_cookie_size = 4093
+
+    #: A :class:`Headers` object representing the response headers.
+    headers: Headers
+
+    def __init__(
+        self,
+        status: t.Optional[t.Union[int, str, HTTPStatus]] = None,
+        headers: t.Optional[
+            t.Union[
+                t.Mapping[str, t.Union[str, int, t.Iterable[t.Union[str, int]]]],
+                t.Iterable[t.Tuple[str, t.Union[str, int]]],
+            ]
+        ] = None,
+        mimetype: t.Optional[str] = None,
+        content_type: t.Optional[str] = None,
+    ) -> None:
+        if isinstance(headers, Headers):
+            self.headers = headers
+        elif not headers:
+            self.headers = Headers()
+        else:
+            self.headers = Headers(headers)
+
+        if content_type is None:
+            if mimetype is None and "content-type" not in self.headers:
+                mimetype = self.default_mimetype
+            if mimetype is not None:
+                mimetype = get_content_type(mimetype, self.charset)
+            content_type = mimetype
+        if content_type is not None:
+            self.headers["Content-Type"] = content_type
+        if status is None:
+            status = self.default_status
+        self.status = status  # type: ignore
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} [{self.status}]>"
+
+    @property
+    def status_code(self) -> int:
+        """The HTTP status code as a number."""
+        return self._status_code
+
+    @status_code.setter
+    def status_code(self, code: int) -> None:
+        self.status = code  # type: ignore
+
+    @property
+    def status(self) -> str:
+        """The HTTP status code as a string."""
+        return self._status
+
+    @status.setter
+    def status(self, value: t.Union[str, int, HTTPStatus]) -> None:
+        if not isinstance(value, (str, bytes, int, HTTPStatus)):
+            raise TypeError("Invalid status argument")
+
+        self._status, self._status_code = self._clean_status(value)
+
+    def _clean_status(self, value: t.Union[str, int, HTTPStatus]) -> t.Tuple[str, int]:
+        if isinstance(value, HTTPStatus):
+            value = int(value)
+        status = _to_str(value, self.charset)
+        split_status = status.split(None, 1)
+
+        if len(split_status) == 0:
+            raise ValueError("Empty status argument")
+
+        if len(split_status) > 1:
+            if split_status[0].isdigit():
+                # code and message
+                return status, int(split_status[0])
+
+            # multi-word message
+            return f"0 {status}", 0
+
+        if split_status[0].isdigit():
+            # code only
+            status_code = int(split_status[0])
+
+            try:
+                status = f"{status_code} {HTTP_STATUS_CODES[status_code].upper()}"
+            except KeyError:
+                status = f"{status_code} UNKNOWN"
+
+            return status, status_code
+
+        # one-word message
+        return f"0 {status}", 0
+
+    def set_cookie(
+        self,
+        key: str,
+        value: str = "",
+        max_age: t.Optional[t.Union[timedelta, int]] = None,
+        expires: t.Optional[t.Union[str, datetime, int, float]] = None,
+        path: t.Optional[str] = "/",
+        domain: t.Optional[str] = None,
+        secure: bool = False,
+        httponly: bool = False,
+        samesite: t.Optional[str] = None,
+    ) -> None:
+        """Sets a cookie.
+
+        A warning is raised if the size of the cookie header exceeds
+        :attr:`max_cookie_size`, but the header will still be set.
+
+        :param key: the key (name) of the cookie to be set.
+        :param value: the value of the cookie.
+        :param max_age: should be a number of seconds, or `None` (default) if
+                        the cookie should last only as long as the client's
+                        browser session.
+        :param expires: should be a `datetime` object or UNIX timestamp.
+        :param path: limits the cookie to a given path, per default it will
+                     span the whole domain.
+        :param domain: if you want to set a cross-domain cookie.  For example,
+                       ``domain=".example.com"`` will set a cookie that is
+                       readable by the domain ``www.example.com``,
+                       ``foo.example.com`` etc.  Otherwise, a cookie will only
+                       be readable by the domain that set it.
+        :param secure: If ``True``, the cookie will only be available
+            via HTTPS.
+        :param httponly: Disallow JavaScript access to the cookie.
+        :param samesite: Limit the scope of the cookie to only be
+            attached to requests that are "same-site".
+        """
+        self.headers.add(
+            "Set-Cookie",
+            dump_cookie(
+                key,
+                value=value,
+                max_age=max_age,
+                expires=expires,
+                path=path,
+                domain=domain,
+                secure=secure,
+                httponly=httponly,
+                charset=self.charset,
+                max_size=self.max_cookie_size,
+                samesite=samesite,
+            ),
+        )
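+
+    # Example usage (a sketch; the cookie name and value are illustrative):
+    #   resp = Response()
+    #   resp.set_cookie(
+    #       "session", "abc123", max_age=3600,
+    #       secure=True, httponly=True, samesite="Lax",
+    #   )
+    #   resp.headers.getlist("Set-Cookie")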
+
+    def delete_cookie(
+        self,
+        key: str,
+        path: str = "/",
+        domain: t.Optional[str] = None,
+        secure: bool = False,
+        httponly: bool = False,
+        samesite: t.Optional[str] = None,
+    ) -> None:
+        """Delete a cookie.  Fails silently if key doesn't exist.
+
+        :param key: the key (name) of the cookie to be deleted.
+        :param path: if the cookie that should be deleted was limited to a
+                     path, the path has to be defined here.
+        :param domain: if the cookie that should be deleted was limited to a
+                       domain, that domain has to be defined here.
+        :param secure: If ``True``, the cookie will only be available
+            via HTTPS.
+        :param httponly: Disallow JavaScript access to the cookie.
+        :param samesite: Limit the scope of the cookie to only be
+            attached to requests that are "same-site".
+        """
+        self.set_cookie(
+            key,
+            expires=0,
+            max_age=0,
+            path=path,
+            domain=domain,
+            secure=secure,
+            httponly=httponly,
+            samesite=samesite,
+        )
+
+    @property
+    def is_json(self) -> bool:
+        """Check if the mimetype indicates JSON data, either
+        :mimetype:`application/json` or :mimetype:`application/*+json`.
+        """
+        mt = self.mimetype
+        return mt is not None and (
+            mt == "application/json"
+            or (mt.startswith("application/") and mt.endswith("+json"))
+        )
+
+    # Common Descriptors
+
+    @property
+    def mimetype(self) -> t.Optional[str]:
+        """The mimetype (content type without charset etc.)"""
+        ct = self.headers.get("content-type")
+
+        if ct:
+            return ct.split(";")[0].strip()
+        else:
+            return None
+
+    @mimetype.setter
+    def mimetype(self, value: str) -> None:
+        self.headers["Content-Type"] = get_content_type(value, self.charset)
+
+    @property
+    def mimetype_params(self) -> t.Dict[str, str]:
+        """The mimetype parameters as dict. For example if the
+        content type is ``text/html; charset=utf-8`` the params would be
+        ``{'charset': 'utf-8'}``.
+
+        .. versionadded:: 0.5
+        """
+
+        def on_update(d: t.Dict[str, str]) -> None:
+            self.headers["Content-Type"] = dump_options_header(self.mimetype, d)
+
+        d = parse_options_header(self.headers.get("content-type", ""))[1]
+        return CallbackDict(d, on_update)
+
+    location = header_property[str](
+        "Location",
+        doc="""The Location response-header field is used to redirect
+        the recipient to a location other than the Request-URI for
+        completion of the request or identification of a new
+        resource.""",
+    )
+    age = header_property(
+        "Age",
+        None,
+        parse_age,
+        dump_age,  # type: ignore
+        doc="""The Age response-header field conveys the sender's
+        estimate of the amount of time since the response (or its
+        revalidation) was generated at the origin server.
+
+        Age values are non-negative decimal integers, representing time
+        in seconds.""",
+    )
+    content_type = header_property[str](
+        "Content-Type",
+        doc="""The Content-Type entity-header field indicates the media
+        type of the entity-body sent to the recipient or, in the case of
+        the HEAD method, the media type that would have been sent had
+        the request been a GET.""",
+    )
+    content_length = header_property(
+        "Content-Length",
+        None,
+        int,
+        str,
+        doc="""The Content-Length entity-header field indicates the size
+        of the entity-body, in decimal number of OCTETs, sent to the
+        recipient or, in the case of the HEAD method, the size of the
+        entity-body that would have been sent had the request been a
+        GET.""",
+    )
+    content_location = header_property[str](
+        "Content-Location",
+        doc="""The Content-Location entity-header field MAY be used to
+        supply the resource location for the entity enclosed in the
+        message when that entity is accessible from a location separate
+        from the requested resource's URI.""",
+    )
+    content_encoding = header_property[str](
+        "Content-Encoding",
+        doc="""The Content-Encoding entity-header field is used as a
+        modifier to the media-type. When present, its value indicates
+        what additional content codings have been applied to the
+        entity-body, and thus what decoding mechanisms must be applied
+        in order to obtain the media-type referenced by the Content-Type
+        header field.""",
+    )
+    content_md5 = header_property[str](
+        "Content-MD5",
+        doc="""The Content-MD5 entity-header field, as defined in
+        RFC 1864, is an MD5 digest of the entity-body for the purpose of
+        providing an end-to-end message integrity check (MIC) of the
+        entity-body. (Note: a MIC is good for detecting accidental
+        modification of the entity-body in transit, but is not proof
+        against malicious attacks.)""",
+    )
+    date = header_property(
+        "Date",
+        None,
+        parse_date,
+        http_date,
+        doc="""The Date general-header field represents the date and
+        time at which the message was originated, having the same
+        semantics as orig-date in RFC 822.
+
+        .. versionchanged:: 2.0
+            The datetime object is timezone-aware.
+        """,
+    )
+    expires = header_property(
+        "Expires",
+        None,
+        parse_date,
+        http_date,
+        doc="""The Expires entity-header field gives the date/time after
+        which the response is considered stale. A stale cache entry may
+        not normally be returned by a cache.
+
+        .. versionchanged:: 2.0
+            The datetime object is timezone-aware.
+        """,
+    )
+    last_modified = header_property(
+        "Last-Modified",
+        None,
+        parse_date,
+        http_date,
+        doc="""The Last-Modified entity-header field indicates the date
+        and time at which the origin server believes the variant was
+        last modified.
+
+        .. versionchanged:: 2.0
+            The datetime object is timezone-aware.
+        """,
+    )
+
+    @property
+    def retry_after(self) -> t.Optional[datetime]:
+        """The Retry-After response-header field can be used with a
+        503 (Service Unavailable) response to indicate how long the
+        service is expected to be unavailable to the requesting client.
+
+        Time in seconds until expiration or date.
+
+        .. versionchanged:: 2.0
+            The datetime object is timezone-aware.
+        """
+        value = self.headers.get("retry-after")
+        if value is None:
+            return None
+        elif value.isdigit():
+            return datetime.now(timezone.utc) + timedelta(seconds=int(value))
+        return parse_date(value)
+
+    @retry_after.setter
+    def retry_after(self, value: t.Optional[t.Union[datetime, int, str]]) -> None:
+        if value is None:
+            if "retry-after" in self.headers:
+                del self.headers["retry-after"]
+            return
+        elif isinstance(value, datetime):
+            value = http_date(value)
+        else:
+            value = str(value)
+        self.headers["Retry-After"] = value
+
+    vary = _set_property(
+        "Vary",
+        doc="""The Vary field value indicates the set of request-header
+        fields that fully determines, while the response is fresh,
+        whether a cache is permitted to use the response to reply to a
+        subsequent request without revalidation.""",
+    )
+    content_language = _set_property(
+        "Content-Language",
+        doc="""The Content-Language entity-header field describes the
+        natural language(s) of the intended audience for the enclosed
+        entity. Note that this might not be equivalent to all the
+        languages used within the entity-body.""",
+    )
+    allow = _set_property(
+        "Allow",
+        doc="""The Allow entity-header field lists the set of methods
+        supported by the resource identified by the Request-URI. The
+        purpose of this field is strictly to inform the recipient of
+        valid methods associated with the resource. An Allow header
+        field MUST be present in a 405 (Method Not Allowed)
+        response.""",
+    )
+
+    # ETag
+
+    @property
+    def cache_control(self) -> ResponseCacheControl:
+        """The Cache-Control general-header field is used to specify
+        directives that MUST be obeyed by all caching mechanisms along the
+        request/response chain.
+        """
+
+        def on_update(cache_control: ResponseCacheControl) -> None:
+            if not cache_control and "cache-control" in self.headers:
+                del self.headers["cache-control"]
+            elif cache_control:
+                self.headers["Cache-Control"] = cache_control.to_header()
+
+        return parse_cache_control_header(
+            self.headers.get("cache-control"), on_update, ResponseCacheControl
+        )
+
+    def set_etag(self, etag: str, weak: bool = False) -> None:
+        """Set the etag, and override the old one if there was one."""
+        self.headers["ETag"] = quote_etag(etag, weak)
+
+    def get_etag(self) -> t.Union[t.Tuple[str, bool], t.Tuple[None, None]]:
+        """Return a tuple in the form ``(etag, is_weak)``.  If there is no
+        ETag the return value is ``(None, None)``.
+        """
+        return unquote_etag(self.headers.get("ETag"))
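+
+    # E.g. (a sketch):
+    #   resp.set_etag("v1")        # ETag: "v1"
+    #   resp.set_etag("v1", True)  # ETag: W/"v1"
+    #   resp.get_etag()            # -> ("v1", True)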
+
+    accept_ranges = header_property[str](
+        "Accept-Ranges",
+        doc="""The `Accept-Ranges` header. Even though the name would
+        indicate that multiple values are supported, it must be one
+        string token only.
+
+        The values ``'bytes'`` and ``'none'`` are common.
+
+        .. versionadded:: 0.7""",
+    )
+
+    @property
+    def content_range(self) -> ContentRange:
+        """The ``Content-Range`` header as a
+        :class:`~werkzeug.datastructures.ContentRange` object. Available
+        even if the header is not set.
+
+        .. versionadded:: 0.7
+        """
+
+        def on_update(rng: ContentRange) -> None:
+            if not rng:
+                del self.headers["content-range"]
+            else:
+                self.headers["Content-Range"] = rng.to_header()
+
+        rv = parse_content_range_header(self.headers.get("content-range"), on_update)
+        # always provide a content range object to make the descriptor
+        # more user friendly.  It provides an unset() method that can be
+        # used to remove the header quickly.
+        if rv is None:
+            rv = ContentRange(None, None, None, on_update=on_update)
+        return rv
+
+    @content_range.setter
+    def content_range(self, value: t.Optional[t.Union[ContentRange, str]]) -> None:
+        if not value:
+            del self.headers["content-range"]
+        elif isinstance(value, str):
+            self.headers["Content-Range"] = value
+        else:
+            self.headers["Content-Range"] = value.to_header()
+
+    # Authorization
+
+    @property
+    def www_authenticate(self) -> WWWAuthenticate:
+        """The ``WWW-Authenticate`` header in a parsed form."""
+
+        def on_update(www_auth: WWWAuthenticate) -> None:
+            if not www_auth and "www-authenticate" in self.headers:
+                del self.headers["www-authenticate"]
+            elif www_auth:
+                self.headers["WWW-Authenticate"] = www_auth.to_header()
+
+        header = self.headers.get("www-authenticate")
+        return parse_www_authenticate_header(header, on_update)
+
+    # CSP
+
+    content_security_policy = header_property(
+        "Content-Security-Policy",
+        None,
+        parse_csp_header,  # type: ignore
+        dump_csp_header,
+        doc="""The Content-Security-Policy header adds an additional layer of
+        security to help detect and mitigate certain types of attacks.""",
+    )
+    content_security_policy_report_only = header_property(
+        "Content-Security-Policy-Report-Only",
+        None,
+        parse_csp_header,  # type: ignore
+        dump_csp_header,
+        doc="""The Content-Security-Policy-Report-Only header adds a csp policy
+        that is not enforced but is reported thereby helping detect
+        certain types of attacks.""",
+    )
+
+    # CORS
+
+    @property
+    def access_control_allow_credentials(self) -> bool:
+        """Whether credentials can be shared by the browser to
+        JavaScript code. As part of the preflight request it indicates
+        whether credentials can be used on the cross origin request.
+        """
+        return "Access-Control-Allow-Credentials" in self.headers
+
+    @access_control_allow_credentials.setter
+    def access_control_allow_credentials(self, value: t.Optional[bool]) -> None:
+        if value is True:
+            self.headers["Access-Control-Allow-Credentials"] = "true"
+        else:
+            self.headers.pop("Access-Control-Allow-Credentials", None)
+
+    access_control_allow_headers = header_property(
+        "Access-Control-Allow-Headers",
+        load_func=parse_set_header,
+        dump_func=dump_header,
+        doc="Which headers can be sent with the cross origin request.",
+    )
+
+    access_control_allow_methods = header_property(
+        "Access-Control-Allow-Methods",
+        load_func=parse_set_header,
+        dump_func=dump_header,
+        doc="Which methods can be used for the cross origin request.",
+    )
+
+    access_control_allow_origin = header_property[str](
+        "Access-Control-Allow-Origin",
+        doc="The origin or '*' for any origin that may make cross origin requests.",
+    )
+
+    access_control_expose_headers = header_property(
+        "Access-Control-Expose-Headers",
+        load_func=parse_set_header,
+        dump_func=dump_header,
+        doc="Which headers can be shared by the browser to JavaScript code.",
+    )
+
+    access_control_max_age = header_property(
+        "Access-Control-Max-Age",
+        load_func=int,
+        dump_func=str,
+        doc="The maximum age in seconds the access control settings can be cached for.",
+    )
+
+    cross_origin_opener_policy = header_property[COOP](
+        "Cross-Origin-Opener-Policy",
+        load_func=lambda value: COOP(value),
+        dump_func=lambda value: value.value,
+        default=COOP.UNSAFE_NONE,
+        doc="""Allows control over sharing of browsing context group with cross-origin
+    documents. The value must be a member of the :class:`werkzeug.http.COOP` enum.""",
+    )
+
+    cross_origin_embedder_policy = header_property[COEP](
+        "Cross-Origin-Embedder-Policy",
+        load_func=lambda value: COEP(value),
+        dump_func=lambda value: value.value,
+        default=COEP.UNSAFE_NONE,
+        doc="""Prevents a document from loading any cross-origin resources that do not
+    explicitly grant the document permission. The value must be a member of
+    the :class:`werkzeug.http.COEP` enum.""",
+    )
diff --git a/venv/lib/python3.7/site-packages/werkzeug/sansio/utils.py b/venv/lib/python3.7/site-packages/werkzeug/sansio/utils.py
new file mode 100644
index 00000000..1b4d8920
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/sansio/utils.py
@@ -0,0 +1,142 @@
+import typing as t
+
+from .._internal import _encode_idna
+from ..exceptions import SecurityError
+from ..urls import uri_to_iri
+from ..urls import url_quote
+
+
+def host_is_trusted(hostname: str, trusted_list: t.Iterable[str]) -> bool:
+    """Check if a host matches a list of trusted names.
+
+    :param hostname: The name to check.
+    :param trusted_list: A list of valid names to match. If a name
+        starts with a dot, it will match all subdomains.
+
+    .. versionadded:: 0.9
+    """
+    if not hostname:
+        return False
+
+    if isinstance(trusted_list, str):
+        trusted_list = [trusted_list]
+
+    def _normalize(hostname: str) -> bytes:
+        if ":" in hostname:
+            hostname = hostname.rsplit(":", 1)[0]
+
+        return _encode_idna(hostname)
+
+    try:
+        hostname_bytes = _normalize(hostname)
+    except UnicodeError:
+        return False
+
+    for ref in trusted_list:
+        if ref.startswith("."):
+            ref = ref[1:]
+            suffix_match = True
+        else:
+            suffix_match = False
+
+        try:
+            ref_bytes = _normalize(ref)
+        except UnicodeError:
+            return False
+
+        if ref_bytes == hostname_bytes:
+            return True
+
+        if suffix_match and hostname_bytes.endswith(b"." + ref_bytes):
+            return True
+
+    return False
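+
+# For example (a sketch; the host names are illustrative):
+#   host_is_trusted("example.com", ["example.com"])       -> True
+#   host_is_trusted("api.example.com", [".example.com"])  -> True
+#   host_is_trusted("example.com:8080", ["example.com"])  -> True  (port ignored)
+#   host_is_trusted("evil.com", ["example.com"])          -> False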
+
+
+def get_host(
+    scheme: str,
+    host_header: t.Optional[str],
+    server: t.Optional[t.Tuple[str, t.Optional[int]]] = None,
+    trusted_hosts: t.Optional[t.Iterable[str]] = None,
+) -> str:
+    """Return the host for the given parameters.
+
+    This first checks the ``host_header``. If it's not present, then
+    ``server`` is used. The host will only contain the port if it is
+    different than the standard port for the protocol.
+
+    Optionally, verify that the host is trusted using
+    :func:`host_is_trusted` and raise a
+    :exc:`~werkzeug.exceptions.SecurityError` if it is not.
+
+    :param scheme: The protocol the request used, like ``"https"``.
+    :param host_header: The ``Host`` header value.
+    :param server: Address of the server. ``(host, port)``, or
+        ``(path, None)`` for unix sockets.
+    :param trusted_hosts: A list of trusted host names.
+
+    :return: Host, with port if necessary.
+    :raise ~werkzeug.exceptions.SecurityError: If the host is not
+        trusted.
+    """
+    host = ""
+
+    if host_header is not None:
+        host = host_header
+    elif server is not None:
+        host = server[0]
+
+        if server[1] is not None:
+            host = f"{host}:{server[1]}"
+
+    if scheme in {"http", "ws"} and host.endswith(":80"):
+        host = host[:-3]
+    elif scheme in {"https", "wss"} and host.endswith(":443"):
+        host = host[:-4]
+
+    if trusted_hosts is not None:
+        if not host_is_trusted(host, trusted_hosts):
+            raise SecurityError(f"Host {host!r} is not trusted.")
+
+    return host
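+
+# E.g. (a sketch): get_host("https", "example.com:443") -> "example.com", and
+# get_host("http", None, ("127.0.0.1", 8000)) -> "127.0.0.1:8000".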
+
+
+def get_current_url(
+    scheme: str,
+    host: str,
+    root_path: t.Optional[str] = None,
+    path: t.Optional[str] = None,
+    query_string: t.Optional[bytes] = None,
+) -> str:
+    """Recreate the URL for a request. If an optional part isn't
+    provided, it and subsequent parts are not included in the URL.
+
+    The URL is an IRI, not a URI, so it may contain Unicode characters.
+    Use :func:`~werkzeug.urls.iri_to_uri` to convert it to ASCII.
+
+    :param scheme: The protocol the request used, like ``"https"``.
+    :param host: The host the request was made to. See :func:`get_host`.
+    :param root_path: Prefix that the application is mounted under. This
+        is prepended to ``path``.
+    :param path: The path part of the URL after ``root_path``.
+    :param query_string: The portion of the URL after the "?".
+    """
+    url = [scheme, "://", host]
+
+    if root_path is None:
+        url.append("/")
+        return uri_to_iri("".join(url))
+
+    url.append(url_quote(root_path.rstrip("/")))
+    url.append("/")
+
+    if path is None:
+        return uri_to_iri("".join(url))
+
+    url.append(url_quote(path.lstrip("/")))
+
+    if query_string:
+        url.append("?")
+        url.append(url_quote(query_string, safe=":&%=+$!*'(),"))
+
+    return uri_to_iri("".join(url))
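+
+# E.g. (a sketch):
+#   get_current_url("https", "example.com", "/app", "page", b"x=1")
+#   -> "https://example.com/app/page?x=1"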
diff --git a/venv/lib/python3.7/site-packages/werkzeug/security.py b/venv/lib/python3.7/site-packages/werkzeug/security.py
new file mode 100644
index 00000000..e23040af
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/security.py
@@ -0,0 +1,247 @@
+import hashlib
+import hmac
+import os
+import posixpath
+import secrets
+import typing as t
+import warnings
+
+
+SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
+DEFAULT_PBKDF2_ITERATIONS = 260000
+
+_os_alt_seps: t.List[str] = list(
+    sep for sep in [os.path.sep, os.path.altsep] if sep is not None and sep != "/"
+)
+
+
+def pbkdf2_hex(
+    data: t.Union[str, bytes],
+    salt: t.Union[str, bytes],
+    iterations: int = DEFAULT_PBKDF2_ITERATIONS,
+    keylen: t.Optional[int] = None,
+    hashfunc: t.Optional[t.Union[str, t.Callable]] = None,
+) -> str:
+    """Like :func:`pbkdf2_bin`, but returns a hex-encoded string.
+
+    :param data: the data to derive.
+    :param salt: the salt for the derivation.
+    :param iterations: the number of iterations.
+    :param keylen: the length of the resulting key.  If not provided,
+                   the digest size will be used.
+    :param hashfunc: the hash function to use.  This can either be the
+                     string name of a known hash function, or a function
+                     from the hashlib module.  Defaults to sha256.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use :func:`hashlib.pbkdf2_hmac`
+        instead.
+
+    .. versionadded:: 0.9
+    """
+    warnings.warn(
+        "'pbkdf2_hex' is deprecated and will be removed in Werkzeug"
+        " 2.1. Use 'hashlib.pbkdf2_hmac().hex()' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return pbkdf2_bin(data, salt, iterations, keylen, hashfunc).hex()
+
+
+def pbkdf2_bin(
+    data: t.Union[str, bytes],
+    salt: t.Union[str, bytes],
+    iterations: int = DEFAULT_PBKDF2_ITERATIONS,
+    keylen: t.Optional[int] = None,
+    hashfunc: t.Optional[t.Union[str, t.Callable]] = None,
+) -> bytes:
+    """Returns a binary digest for the PBKDF2 hash algorithm of `data`
+    with the given `salt`. It iterates `iterations` times and produces a
+    key of `keylen` bytes. By default, SHA-256 is used as hash function;
+    a different hashlib `hashfunc` can be provided.
+
+    :param data: the data to derive.
+    :param salt: the salt for the derivation.
+    :param iterations: the number of iterations.
+    :param keylen: the length of the resulting key.  If not provided
+                   the digest size will be used.
+    :param hashfunc: the hash function to use.  This can either be the
+                     string name of a known hash function or a function
+                     from the hashlib module.  Defaults to sha256.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use :func:`hashlib.pbkdf2_hmac`
+        instead.
+
+    .. versionadded:: 0.9
+    """
+    warnings.warn(
+        "'pbkdf2_bin' is deprecated and will be removed in Werkzeug"
+        " 2.1. Use 'hashlib.pbkdf2_hmac()' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
+    if isinstance(data, str):
+        data = data.encode("utf8")
+
+    if isinstance(salt, str):
+        salt = salt.encode("utf8")
+
+    if not hashfunc:
+        hash_name = "sha256"
+    elif callable(hashfunc):
+        hash_name = hashfunc().name
+    else:
+        hash_name = hashfunc
+
+    return hashlib.pbkdf2_hmac(hash_name, data, salt, iterations, keylen)
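+
+# Deprecated callers can switch to the stdlib directly (a sketch):
+#   hashlib.pbkdf2_hmac("sha256", b"secret", b"salt", DEFAULT_PBKDF2_ITERATIONS).hex()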
+
+
+def safe_str_cmp(a: str, b: str) -> bool:
+    """This function compares strings in somewhat constant time.  This
+    requires that the length of at least one string is known in advance.
+
+    Returns `True` if the two strings are equal, or `False` if they are not.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use
+        :func:`hmac.compare_digest` instead.
+
+    .. versionadded:: 0.7
+    """
+    warnings.warn(
+        "'safe_str_cmp' is deprecated and will be removed in Werkzeug"
+        " 2.1. Use 'hmac.compare_digest' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
+    if isinstance(a, str):
+        a = a.encode("utf-8")  # type: ignore
+
+    if isinstance(b, str):
+        b = b.encode("utf-8")  # type: ignore
+
+    return hmac.compare_digest(a, b)
+
+
+def gen_salt(length: int) -> str:
+    """Generate a random string of SALT_CHARS with specified ``length``."""
+    if length <= 0:
+        raise ValueError("Salt length must be positive")
+
+    return "".join(secrets.choice(SALT_CHARS) for _ in range(length))
+
+
+def _hash_internal(method: str, salt: str, password: str) -> t.Tuple[str, str]:
+    """Internal password hash helper.  Supports plaintext without salt,
+    unsalted and salted passwords.  In case salted passwords are used
+    hmac is used.
+    """
+    if method == "plain":
+        return password, method
+
+    salt = salt.encode("utf-8")
+    password = password.encode("utf-8")
+
+    if method.startswith("pbkdf2:"):
+        if not salt:
+            raise ValueError("Salt is required for PBKDF2")
+
+        args = method[7:].split(":")
+
+        if len(args) not in (1, 2):
+            raise ValueError("Invalid number of arguments for PBKDF2")
+
+        method = args.pop(0)
+        iterations = int(args[0] or 0) if args else DEFAULT_PBKDF2_ITERATIONS
+        return (
+            hashlib.pbkdf2_hmac(method, password, salt, iterations).hex(),
+            f"pbkdf2:{method}:{iterations}",
+        )
+
+    if salt:
+        return hmac.new(salt, password, method).hexdigest(), method
+
+    return hashlib.new(method, password).hexdigest(), method
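+
+# Method strings handled above (a sketch):
+#   "plain"                 -> password returned as-is
+#   "pbkdf2:sha256"         -> PBKDF2-HMAC-SHA256 with DEFAULT_PBKDF2_ITERATIONS
+#   "pbkdf2:sha256:150000"  -> PBKDF2-HMAC-SHA256 with 150000 iterations
+#   "sha256" (with a salt)  -> hmac.new(salt, password, "sha256")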
+
+
+def generate_password_hash(
+    password: str, method: str = "pbkdf2:sha256", salt_length: int = 16
+) -> str:
+    """Hash a password with the given method and salt with a string of
+    the given length. The format of the string returned includes the method
+    that was used so that :func:`check_password_hash` can check the hash.
+
+    The format for the hashed string looks like this::
+
+        method$salt$hash
+
+    This function can **not** generate unsalted password hashes, but passing
+    ``method='plain'`` will store the password in plaintext. If a salt is
+    used, HMAC is used internally to salt the password.
+
+    If PBKDF2 is wanted it can be enabled by setting the method to
+    ``pbkdf2:method:iterations`` where iterations is optional::
+
+        pbkdf2:sha256:80000$salt$hash
+        pbkdf2:sha256$salt$hash
+
+    :param password: the password to hash.
+    :param method: the hash method to use (one that hashlib supports). Can
+                   optionally be in the format ``pbkdf2:method:iterations``
+                   to enable PBKDF2.
+    :param salt_length: the length of the salt in letters.
+    """
+    salt = gen_salt(salt_length) if method != "plain" else ""
+    h, actual_method = _hash_internal(method, salt, password)
+    return f"{actual_method}${salt}${h}"
+
+
+def check_password_hash(pwhash: str, password: str) -> bool:
+    """Check a password against a given salted and hashed password value.
+    In order to support unsalted legacy passwords this method supports
+    plain text passwords, md5 and sha1 hashes (both salted and unsalted).
+
+    Returns `True` if the password matched, `False` otherwise.
+
+    :param pwhash: a hashed string like returned by
+                   :func:`generate_password_hash`.
+    :param password: the plaintext password to compare against the hash.
+    """
+    if pwhash.count("$") < 2:
+        return False
+
+    method, salt, hashval = pwhash.split("$", 2)
+    return hmac.compare_digest(_hash_internal(method, salt, password)[0], hashval)
+
+
+def safe_join(directory: str, *pathnames: str) -> t.Optional[str]:
+    """Safely join zero or more untrusted path components to a base
+    directory to avoid escaping the base directory.
+
+    :param directory: The trusted base directory.
+    :param pathnames: The untrusted path components relative to the
+        base directory.
+    :return: A safe path, otherwise ``None``.
+    """
+    parts = [directory]
+
+    for filename in pathnames:
+        if filename != "":
+            filename = posixpath.normpath(filename)
+
+        if (
+            any(sep in filename for sep in _os_alt_seps)
+            or os.path.isabs(filename)
+            or filename == ".."
+            or filename.startswith("../")
+        ):
+            return None
+
+        parts.append(filename)
+
+    return posixpath.join(*parts)
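+
+# E.g. (a sketch):
+#   safe_join("/srv/static", "css", "app.css")  -> "/srv/static/css/app.css"
+#   safe_join("/srv/static", "../etc/passwd")   -> None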
diff --git a/venv/lib/python3.7/site-packages/werkzeug/serving.py b/venv/lib/python3.7/site-packages/werkzeug/serving.py
new file mode 100644
index 00000000..1be99492
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/serving.py
@@ -0,0 +1,1079 @@
+"""A WSGI and HTTP server for use **during development only**. This
+server is convenient to use, but is not designed to be particularly
+stable, secure, or efficient. Use a dedicate WSGI server and HTTP
+server when deploying to production.
+
+It provides features like interactive debugging and code reloading. Use
+``run_simple`` to start the server. Put this in a ``run.py`` script:
+
+.. code-block:: python
+
+    from myapp import create_app
+    from werkzeug import run_simple
+"""
+import io
+import os
+import platform
+import signal
+import socket
+import socketserver
+import sys
+import typing as t
+import warnings
+from datetime import datetime as dt
+from datetime import timedelta
+from datetime import timezone
+from http.server import BaseHTTPRequestHandler
+from http.server import HTTPServer
+
+from ._internal import _log
+from ._internal import _wsgi_encoding_dance
+from .exceptions import InternalServerError
+from .urls import uri_to_iri
+from .urls import url_parse
+from .urls import url_unquote
+
+try:
+    import ssl
+except ImportError:
+
+    class _SslDummy:
+        def __getattr__(self, name: str) -> t.Any:
+            raise RuntimeError("SSL support unavailable")
+
+    ssl = _SslDummy()  # type: ignore
+
+_log_add_style = True
+
+if os.name == "nt":
+    try:
+        __import__("colorama")
+    except ImportError:
+        _log_add_style = False
+
+can_fork = hasattr(os, "fork")
+
+if can_fork:
+    ForkingMixIn = socketserver.ForkingMixIn
+else:
+
+    class ForkingMixIn:  # type: ignore
+        pass
+
+
+try:
+    af_unix = socket.AF_UNIX
+except AttributeError:
+    af_unix = None  # type: ignore
+
+LISTEN_QUEUE = 128
+can_open_by_fd = platform.system() != "Windows" and hasattr(socket, "fromfd")
+
+_TSSLContextArg = t.Optional[
+    t.Union["ssl.SSLContext", t.Tuple[str, t.Optional[str]], "te.Literal['adhoc']"]
+]
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te  # noqa: F401
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+    from cryptography.hazmat.primitives.asymmetric.rsa import (
+        RSAPrivateKeyWithSerialization,
+    )
+    from cryptography.x509 import Certificate
+
+
+class DechunkedInput(io.RawIOBase):
+    """An input stream that handles Transfer-Encoding 'chunked'"""
+
+    def __init__(self, rfile: t.BinaryIO) -> None:
+        self._rfile = rfile
+        self._done = False
+        self._len = 0
+
+    def readable(self) -> bool:
+        return True
+
+    def read_chunk_len(self) -> int:
+        try:
+            line = self._rfile.readline().decode("latin1")
+            _len = int(line.strip(), 16)
+        except ValueError:
+            raise OSError("Invalid chunk header")
+        if _len < 0:
+            raise OSError("Negative chunk length not allowed")
+        return _len
+
+    def readinto(self, buf: bytearray) -> int:  # type: ignore
+        read = 0
+        while not self._done and read < len(buf):
+            if self._len == 0:
+                # This is the first chunk or we fully consumed the previous
+                # one. Read the next length of the next chunk
+                self._len = self.read_chunk_len()
+
+            if self._len == 0:
+                # Found the final chunk of size 0. The stream is now exhausted,
+                # but there is still a final newline that should be consumed
+                self._done = True
+
+            if self._len > 0:
+                # There is data (left) in this chunk, so append it to the
+                # buffer. If this operation fully consumes the chunk, this will
+                # reset self._len to 0.
+                n = min(len(buf), self._len)
+
+                # If (read + chunk size) becomes more than len(buf), buf will
+                # grow beyond the original size and read more data than
+                # required. So only read as much data as can fit in buf.
+                if read + n > len(buf):
+                    buf[read:] = self._rfile.read(len(buf) - read)
+                    self._len -= len(buf) - read
+                    read = len(buf)
+                else:
+                    buf[read : read + n] = self._rfile.read(n)
+                    self._len -= n
+                    read += n
+
+            if self._len == 0:
+                # Skip the terminating newline of a chunk that has been fully
+                # consumed. This also applies to the 0-sized final chunk
+                terminator = self._rfile.readline()
+                if terminator not in (b"\n", b"\r\n", b"\r"):
+                    raise OSError("Missing chunk terminating newline")
+
+        return read
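+
+    # Wire framing handled by readinto() (an illustrative sketch):
+    #   b"4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n"  decodes to  b"Wikipedia"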
+
+
+class WSGIRequestHandler(BaseHTTPRequestHandler):
+    """A request handler that implements WSGI dispatching."""
+
+    server: "BaseWSGIServer"
+
+    @property
+    def server_version(self) -> str:  # type: ignore
+        from . import __version__
+
+        return f"Werkzeug/{__version__}"
+
+    def make_environ(self) -> "WSGIEnvironment":
+        request_url = url_parse(self.path)
+
+        def shutdown_server() -> None:
+            warnings.warn(
+                "The 'environ['werkzeug.server.shutdown']' function is"
+                " deprecated and will be removed in Werkzeug 2.1.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            self.server.shutdown_signal = True
+
+        url_scheme = "http" if self.server.ssl_context is None else "https"
+
+        if not self.client_address:
+            self.client_address = ("<local>", 0)
+        elif isinstance(self.client_address, str):
+            self.client_address = (self.client_address, 0)
+
+        # If there was no scheme but the path started with two slashes,
+        # the first segment may have been incorrectly parsed as the
+        # netloc, prepend it to the path again.
+        if not request_url.scheme and request_url.netloc:
+            path_info = f"/{request_url.netloc}{request_url.path}"
+        else:
+            path_info = request_url.path
+
+        path_info = url_unquote(path_info)
+
+        environ: "WSGIEnvironment" = {
+            "wsgi.version": (1, 0),
+            "wsgi.url_scheme": url_scheme,
+            "wsgi.input": self.rfile,
+            "wsgi.errors": sys.stderr,
+            "wsgi.multithread": self.server.multithread,
+            "wsgi.multiprocess": self.server.multiprocess,
+            "wsgi.run_once": False,
+            "werkzeug.server.shutdown": shutdown_server,
+            "werkzeug.socket": self.connection,
+            "SERVER_SOFTWARE": self.server_version,
+            "REQUEST_METHOD": self.command,
+            "SCRIPT_NAME": "",
+            "PATH_INFO": _wsgi_encoding_dance(path_info),
+            "QUERY_STRING": _wsgi_encoding_dance(request_url.query),
+            # Non-standard, added by mod_wsgi, uWSGI
+            "REQUEST_URI": _wsgi_encoding_dance(self.path),
+            # Non-standard, added by gunicorn
+            "RAW_URI": _wsgi_encoding_dance(self.path),
+            "REMOTE_ADDR": self.address_string(),
+            "REMOTE_PORT": self.port_integer(),
+            "SERVER_NAME": self.server.server_address[0],
+            "SERVER_PORT": str(self.server.server_address[1]),
+            "SERVER_PROTOCOL": self.request_version,
+        }
+
+        for key, value in self.headers.items():
+            key = key.upper().replace("-", "_")
+            value = value.replace("\r\n", "")
+            if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"):
+                key = f"HTTP_{key}"
+                if key in environ:
+                    value = f"{environ[key]},{value}"
+            environ[key] = value
+
+        if environ.get("HTTP_TRANSFER_ENCODING", "").strip().lower() == "chunked":
+            environ["wsgi.input_terminated"] = True
+            environ["wsgi.input"] = DechunkedInput(environ["wsgi.input"])
+
+        # Per RFC 2616, if the URL is absolute, use that as the host.
+        # We're using "has a scheme" to indicate an absolute URL.
+        if request_url.scheme and request_url.netloc:
+            environ["HTTP_HOST"] = request_url.netloc
+
+        try:
+            # binary_form=False gives nicer information, but wouldn't be compatible with
+            # what Nginx or Apache could return.
+            peer_cert = self.connection.getpeercert(binary_form=True)
+            if peer_cert is not None:
+                # Nginx and Apache use PEM format.
+                environ["SSL_CLIENT_CERT"] = ssl.DER_cert_to_PEM_cert(peer_cert)
+        except ValueError:
+            # SSL handshake hasn't finished.
+            self.server.log("error", "Cannot fetch SSL peer certificate info")
+        except AttributeError:
+            # Not using TLS, the socket will not have getpeercert().
+            pass
+
+        return environ
+
+    def run_wsgi(self) -> None:
+        if self.headers.get("Expect", "").lower().strip() == "100-continue":
+            self.wfile.write(b"HTTP/1.1 100 Continue\r\n\r\n")
+
+        self.environ = environ = self.make_environ()
+        status_set: t.Optional[str] = None
+        headers_set: t.Optional[t.List[t.Tuple[str, str]]] = None
+        status_sent: t.Optional[str] = None
+        headers_sent: t.Optional[t.List[t.Tuple[str, str]]] = None
+
+        def write(data: bytes) -> None:
+            nonlocal status_sent, headers_sent
+            assert status_set is not None, "write() before start_response"
+            assert headers_set is not None, "write() before start_response"
+            if status_sent is None:
+                status_sent = status_set
+                headers_sent = headers_set
+                try:
+                    code_str, msg = status_sent.split(None, 1)
+                except ValueError:
+                    code_str, msg = status_sent, ""
+                code = int(code_str)
+                self.send_response(code, msg)
+                header_keys = set()
+                for key, value in headers_sent:
+                    self.send_header(key, value)
+                    key = key.lower()
+                    header_keys.add(key)
+                if not (
+                    "content-length" in header_keys
+                    or environ["REQUEST_METHOD"] == "HEAD"
+                    or code < 200
+                    or code in (204, 304)
+                ):
+                    self.close_connection = True
+                    self.send_header("Connection", "close")
+                if "server" not in header_keys:
+                    self.send_header("Server", self.version_string())
+                if "date" not in header_keys:
+                    self.send_header("Date", self.date_time_string())
+                self.end_headers()
+
+            assert isinstance(data, bytes), "applications must write bytes"
+            self.wfile.write(data)
+            self.wfile.flush()
+
+        def start_response(status, headers, exc_info=None):  # type: ignore
+            nonlocal status_set, headers_set
+            if exc_info:
+                try:
+                    if headers_sent:
+                        raise exc_info[1].with_traceback(exc_info[2])
+                finally:
+                    exc_info = None
+            elif headers_set:
+                raise AssertionError("Headers already set")
+            status_set = status
+            headers_set = headers
+            return write
+
+        def execute(app: "WSGIApplication") -> None:
+            application_iter = app(environ, start_response)
+            try:
+                for data in application_iter:
+                    write(data)
+                if not headers_sent:
+                    write(b"")
+            finally:
+                if hasattr(application_iter, "close"):
+                    application_iter.close()  # type: ignore
+
+        try:
+            execute(self.server.app)
+        except (ConnectionError, socket.timeout) as e:
+            self.connection_dropped(e, environ)
+        except Exception:
+            if self.server.passthrough_errors:
+                raise
+            from .debug.tbtools import get_current_traceback
+
+            traceback = get_current_traceback(ignore_system_exceptions=True)
+            try:
+                # if we haven't yet sent the headers but they are set
+                # we roll back to be able to set them again.
+                if status_sent is None:
+                    status_set = None
+                    headers_set = None
+                execute(InternalServerError())
+            except Exception:
+                pass
+            self.server.log("error", "Error on request:\n%s", traceback.plaintext)
+
+    def handle(self) -> None:
+        """Handles a request ignoring dropped connections."""
+        try:
+            BaseHTTPRequestHandler.handle(self)
+        except (ConnectionError, socket.timeout) as e:
+            self.connection_dropped(e)
+        except Exception as e:
+            if self.server.ssl_context is not None and is_ssl_error(e):
+                self.log_error("SSL error occurred: %s", e)
+            else:
+                raise
+        if self.server.shutdown_signal:
+            self.initiate_shutdown()
+
+    def initiate_shutdown(self) -> None:
+        if is_running_from_reloader():
+            # Windows does not provide SIGKILL, go with SIGTERM then.
+            sig = getattr(signal, "SIGKILL", signal.SIGTERM)
+            os.kill(os.getpid(), sig)
+
+        self.server._BaseServer__shutdown_request = True  # type: ignore
+
+    def connection_dropped(
+        self, error: BaseException, environ: t.Optional["WSGIEnvironment"] = None
+    ) -> None:
+        """Called if the connection was closed by the client.  By default
+        nothing happens.
+        """
+
+    def handle_one_request(self) -> None:
+        """Handle a single HTTP request."""
+        self.raw_requestline = self.rfile.readline()
+        if not self.raw_requestline:
+            self.close_connection = True
+        elif self.parse_request():
+            self.run_wsgi()
+
+    def send_response(self, code: int, message: t.Optional[str] = None) -> None:
+        """Send the response header and log the response code."""
+        self.log_request(code)
+        if message is None:
+            message = self.responses[code][0] if code in self.responses else ""
+        if self.request_version != "HTTP/0.9":
+            hdr = f"{self.protocol_version} {code} {message}\r\n"
+            self.wfile.write(hdr.encode("ascii"))
+
+    def version_string(self) -> str:
+        return super().version_string().strip()
+
+    def address_string(self) -> str:
+        if getattr(self, "environ", None):
+            return self.environ["REMOTE_ADDR"]  # type: ignore
+
+        if not self.client_address:
+            return "<local>"
+
+        return self.client_address[0]
+
+    def port_integer(self) -> int:
+        return self.client_address[1]
+
+    def log_request(
+        self, code: t.Union[int, str] = "-", size: t.Union[int, str] = "-"
+    ) -> None:
+        try:
+            path = uri_to_iri(self.path)
+            msg = f"{self.command} {path} {self.request_version}"
+        except AttributeError:
+            # path isn't set if the requestline was bad
+            msg = self.requestline
+
+        code = str(code)
+
+        if _log_add_style:
+            if code[0] == "1":  # 1xx - Informational
+                msg = _ansi_style(msg, "bold")
+            elif code == "200":  # 2xx - Success
+                pass
+            elif code == "304":  # 304 - Resource Not Modified
+                msg = _ansi_style(msg, "cyan")
+            elif code[0] == "3":  # 3xx - Redirection
+                msg = _ansi_style(msg, "green")
+            elif code == "404":  # 404 - Resource Not Found
+                msg = _ansi_style(msg, "yellow")
+            elif code[0] == "4":  # 4xx - Client Error
+                msg = _ansi_style(msg, "bold", "red")
+            else:  # 5xx, or any other response
+                msg = _ansi_style(msg, "bold", "magenta")
+
+        self.log("info", '"%s" %s %s', msg, code, size)
+
+    def log_error(self, format: str, *args: t.Any) -> None:
+        self.log("error", format, *args)
+
+    def log_message(self, format: str, *args: t.Any) -> None:
+        self.log("info", format, *args)
+
+    def log(self, type: str, message: str, *args: t.Any) -> None:
+        _log(
+            type,
+            f"{self.address_string()} - - [{self.log_date_time_string()}] {message}\n",
+            *args,
+        )
+
+
+def _ansi_style(value: str, *styles: str) -> str:
+    codes = {
+        "bold": 1,
+        "red": 31,
+        "green": 32,
+        "yellow": 33,
+        "magenta": 35,
+        "cyan": 36,
+    }
+
+    for style in styles:
+        value = f"\x1b[{codes[style]}m{value}"
+
+    return f"{value}\x1b[0m"
+
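+# For example, ``_ansi_style("GET / HTTP/1.1", "bold", "red")`` returns
+# ``"\x1b[31m\x1b[1mGET / HTTP/1.1\x1b[0m"`` (styles are applied innermost
+# first), which terminals render as bold red text.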
+
+def generate_adhoc_ssl_pair(
+    cn: t.Optional[str] = None,
+) -> t.Tuple["Certificate", "RSAPrivateKeyWithSerialization"]:
+    try:
+        from cryptography import x509
+        from cryptography.x509.oid import NameOID
+        from cryptography.hazmat.backends import default_backend
+        from cryptography.hazmat.primitives import hashes
+        from cryptography.hazmat.primitives.asymmetric import rsa
+    except ImportError:
+        raise TypeError("Using ad-hoc certificates requires the cryptography library.")
+
+    backend = default_backend()
+    pkey = rsa.generate_private_key(
+        public_exponent=65537, key_size=2048, backend=backend
+    )
+
+    # pretty damn sure that this is not actually accepted by anyone
+    if cn is None:
+        cn = "*"
+
+    subject = x509.Name(
+        [
+            x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Dummy Certificate"),
+            x509.NameAttribute(NameOID.COMMON_NAME, cn),
+        ]
+    )
+
+    cert = (
+        x509.CertificateBuilder()
+        .subject_name(subject)
+        .issuer_name(subject)
+        .public_key(pkey.public_key())
+        .serial_number(x509.random_serial_number())
+        .not_valid_before(dt.utcnow())
+        .not_valid_after(dt.utcnow() + timedelta(days=365))
+        .add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False)
+        .add_extension(x509.SubjectAlternativeName([x509.DNSName("*")]), critical=False)
+        .sign(pkey, hashes.SHA256(), backend)
+    )
+    return cert, pkey
+
+
+def make_ssl_devcert(
+    base_path: str, host: t.Optional[str] = None, cn: t.Optional[str] = None
+) -> t.Tuple[str, str]:
+    """Creates an SSL key for development.  This should be used instead of
+    the ``'adhoc'`` key which generates a new cert on each server start.
+    It accepts a path for where it should store the key and cert and
+    either a host or CN.  If a host is given it will use the CN
+    ``*.host/CN=host``.
+
+    For more information see :func:`run_simple`.
+
+    .. versionadded:: 0.9
+
+    :param base_path: the path to the certificate and key.  The extension
+                      ``.crt`` is added for the certificate, ``.key`` is
+                      added for the key.
+    :param host: the name of the host.  This can be used as an alternative
+                 for the `cn`.
+    :param cn: the `CN` to use.
+    """
+
+    if host is not None:
+        cn = f"*.{host}/CN={host}"
+    cert, pkey = generate_adhoc_ssl_pair(cn=cn)
+
+    from cryptography.hazmat.primitives import serialization
+
+    cert_file = f"{base_path}.crt"
+    pkey_file = f"{base_path}.key"
+
+    with open(cert_file, "wb") as f:
+        f.write(cert.public_bytes(serialization.Encoding.PEM))
+    with open(pkey_file, "wb") as f:
+        f.write(
+            pkey.private_bytes(
+                encoding=serialization.Encoding.PEM,
+                format=serialization.PrivateFormat.TraditionalOpenSSL,
+                encryption_algorithm=serialization.NoEncryption(),
+            )
+        )
+
+    return cert_file, pkey_file
+
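+# A minimal usage sketch (the base path is hypothetical): this writes the
+# certificate and key once, so later server starts can reuse them instead of
+# generating a fresh ad-hoc pair on every restart.
+#
+#     cert_file, pkey_file = make_ssl_devcert("/tmp/dev", host="localhost")
+#     # -> ("/tmp/dev.crt", "/tmp/dev.key")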
+
+def generate_adhoc_ssl_context() -> "ssl.SSLContext":
+    """Generates an adhoc SSL context for the development server."""
+    import tempfile
+    import atexit
+
+    cert, pkey = generate_adhoc_ssl_pair()
+
+    from cryptography.hazmat.primitives import serialization
+
+    cert_handle, cert_file = tempfile.mkstemp()
+    pkey_handle, pkey_file = tempfile.mkstemp()
+    atexit.register(os.remove, pkey_file)
+    atexit.register(os.remove, cert_file)
+
+    os.write(cert_handle, cert.public_bytes(serialization.Encoding.PEM))
+    os.write(
+        pkey_handle,
+        pkey.private_bytes(
+            encoding=serialization.Encoding.PEM,
+            format=serialization.PrivateFormat.TraditionalOpenSSL,
+            encryption_algorithm=serialization.NoEncryption(),
+        ),
+    )
+
+    os.close(cert_handle)
+    os.close(pkey_handle)
+    ctx = load_ssl_context(cert_file, pkey_file)
+    return ctx
+
+
+def load_ssl_context(
+    cert_file: str, pkey_file: t.Optional[str] = None, protocol: t.Optional[int] = None
+) -> "ssl.SSLContext":
+    """Loads SSL context from cert/private key files and optional protocol.
+    Many parameters are directly taken from the API of
+    :py:class:`ssl.SSLContext`.
+
+    :param cert_file: Path of the certificate to use.
+    :param pkey_file: Path of the private key to use. If not given, the key
+                      will be obtained from the certificate file.
+    :param protocol: A ``PROTOCOL`` constant from the :mod:`ssl` module.
+        Defaults to :data:`ssl.PROTOCOL_TLS_SERVER`.
+    """
+    if protocol is None:
+        protocol = ssl.PROTOCOL_TLS_SERVER
+
+    ctx = ssl.SSLContext(protocol)
+    ctx.load_cert_chain(cert_file, pkey_file)
+    return ctx
+
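+# For example (paths hypothetical), a pre-generated certificate can be turned
+# into a context for the development server; passing the tuple
+# ``(cert_file, pkey_file)`` as ``ssl_context`` to ``run_simple`` ends up in
+# this function as well.
+#
+#     ctx = load_ssl_context("/tmp/dev.crt", "/tmp/dev.key")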
+
+def is_ssl_error(error: t.Optional[Exception] = None) -> bool:
+    """Checks if the given error (or the current one) is an SSL error."""
+    if error is None:
+        error = t.cast(Exception, sys.exc_info()[1])
+    return isinstance(error, ssl.SSLError)
+
+
+def select_address_family(host: str, port: int) -> socket.AddressFamily:
+    """Return ``AF_INET4``, ``AF_INET6``, or ``AF_UNIX`` depending on
+    the host and port."""
+    if host.startswith("unix://"):
+        return socket.AF_UNIX
+    elif ":" in host and hasattr(socket, "AF_INET6"):
+        return socket.AF_INET6
+    return socket.AF_INET
+
+
+def get_sockaddr(
+    host: str, port: int, family: socket.AddressFamily
+) -> t.Union[t.Tuple[str, int], str]:
+    """Return a fully qualified socket address that can be passed to
+    :func:`socket.bind`."""
+    if family == af_unix:
+        return host.split("://", 1)[1]
+    try:
+        res = socket.getaddrinfo(
+            host, port, family, socket.SOCK_STREAM, socket.IPPROTO_TCP
+        )
+    except socket.gaierror:
+        return host, port
+    return res[0][4]  # type: ignore
+
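+# Taken together with select_address_family, a few representative mappings
+# (values illustrative):
+#
+#     select_address_family("127.0.0.1", 5000)          # socket.AF_INET
+#     select_address_family("::1", 5000)                # socket.AF_INET6
+#     select_address_family("unix:///tmp/app.sock", 0)  # socket.AF_UNIX
+#     get_sockaddr("unix:///tmp/app.sock", 0, socket.AF_UNIX)
+#     # -> "/tmp/app.sock"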
+
+def get_interface_ip(family: socket.AddressFamily) -> str:
+    """Get the IP address of an external interface. Used when binding to
+    0.0.0.0 or ::1 to show a more useful URL.
+
+    :meta private:
+    """
+    # arbitrary private address
+    host = "fd31:f903:5ab5:1::1" if family == socket.AF_INET6 else "10.253.155.219"
+
+    with socket.socket(family, socket.SOCK_DGRAM) as s:
+        try:
+            s.connect((host, 58162))
+        except OSError:
+            return "::1" if family == socket.AF_INET6 else "127.0.0.1"
+
+        return s.getsockname()[0]  # type: ignore
+
+
+class BaseWSGIServer(HTTPServer):
+
+    """Simple single-threaded, single-process WSGI server."""
+
+    multithread = False
+    multiprocess = False
+    request_queue_size = LISTEN_QUEUE
+
+    def __init__(
+        self,
+        host: str,
+        port: int,
+        app: "WSGIApplication",
+        handler: t.Optional[t.Type[WSGIRequestHandler]] = None,
+        passthrough_errors: bool = False,
+        ssl_context: t.Optional[_TSSLContextArg] = None,
+        fd: t.Optional[int] = None,
+    ) -> None:
+        if handler is None:
+            handler = WSGIRequestHandler
+
+        self.address_family = select_address_family(host, port)
+
+        if fd is not None:
+            real_sock = socket.fromfd(fd, self.address_family, socket.SOCK_STREAM)
+            port = 0
+
+        server_address = get_sockaddr(host, int(port), self.address_family)
+
+        # remove socket file if it already exists
+        if self.address_family == af_unix:
+            server_address = t.cast(str, server_address)
+
+            if os.path.exists(server_address):
+                os.unlink(server_address)
+
+        super().__init__(server_address, handler)  # type: ignore
+
+        self.app = app
+        self.passthrough_errors = passthrough_errors
+        self.shutdown_signal = False
+        self.host = host
+        self.port = self.socket.getsockname()[1]
+
+        # Patch in the original socket.
+        if fd is not None:
+            self.socket.close()
+            self.socket = real_sock
+            self.server_address = self.socket.getsockname()
+
+        if ssl_context is not None:
+            if isinstance(ssl_context, tuple):
+                ssl_context = load_ssl_context(*ssl_context)
+            if ssl_context == "adhoc":
+                ssl_context = generate_adhoc_ssl_context()
+
+            self.socket = ssl_context.wrap_socket(self.socket, server_side=True)
+            self.ssl_context: t.Optional["ssl.SSLContext"] = ssl_context
+        else:
+            self.ssl_context = None
+
+    def log(self, type: str, message: str, *args: t.Any) -> None:
+        _log(type, message, *args)
+
+    def serve_forever(self, poll_interval: float = 0.5) -> None:
+        self.shutdown_signal = False
+        try:
+            super().serve_forever(poll_interval=poll_interval)
+        except KeyboardInterrupt:
+            pass
+        finally:
+            self.server_close()
+
+    def handle_error(self, request: t.Any, client_address: t.Tuple[str, int]) -> None:
+        if self.passthrough_errors:
+            raise
+
+        return super().handle_error(request, client_address)
+
+
+class ThreadedWSGIServer(socketserver.ThreadingMixIn, BaseWSGIServer):
+
+    """A WSGI server that does threading."""
+
+    multithread = True
+    daemon_threads = True
+
+
+class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer):
+
+    """A WSGI server that does forking."""
+
+    multiprocess = True
+
+    def __init__(
+        self,
+        host: str,
+        port: int,
+        app: "WSGIApplication",
+        processes: int = 40,
+        handler: t.Optional[t.Type[WSGIRequestHandler]] = None,
+        passthrough_errors: bool = False,
+        ssl_context: t.Optional[_TSSLContextArg] = None,
+        fd: t.Optional[int] = None,
+    ) -> None:
+        if not can_fork:
+            raise ValueError("Your platform does not support forking.")
+        BaseWSGIServer.__init__(
+            self, host, port, app, handler, passthrough_errors, ssl_context, fd
+        )
+        self.max_children = processes
+
+
+def make_server(
+    host: str,
+    port: int,
+    app: "WSGIApplication",
+    threaded: bool = False,
+    processes: int = 1,
+    request_handler: t.Optional[t.Type[WSGIRequestHandler]] = None,
+    passthrough_errors: bool = False,
+    ssl_context: t.Optional[_TSSLContextArg] = None,
+    fd: t.Optional[int] = None,
+) -> BaseWSGIServer:
+    """Create a new server instance that is either threaded, or forks
+    or just processes one request after another.
+    """
+    if threaded and processes > 1:
+        raise ValueError("cannot have a multithreaded and multi process server.")
+    elif threaded:
+        return ThreadedWSGIServer(
+            host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd
+        )
+    elif processes > 1:
+        return ForkingWSGIServer(
+            host,
+            port,
+            app,
+            processes,
+            request_handler,
+            passthrough_errors,
+            ssl_context,
+            fd=fd,
+        )
+    else:
+        return BaseWSGIServer(
+            host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd
+        )
+
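+# A small sketch of how the flags map to server classes (``app`` stands in
+# for any WSGI application):
+#
+#     make_server("127.0.0.1", 5000, app)                 # BaseWSGIServer
+#     make_server("127.0.0.1", 5000, app, threaded=True)  # ThreadedWSGIServer
+#     make_server("127.0.0.1", 5000, app, processes=4)    # ForkingWSGIServer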
+
+def is_running_from_reloader() -> bool:
+    """Checks if the application is running from within the Werkzeug
+    reloader subprocess.
+
+    .. versionadded:: 0.10
+    """
+    return os.environ.get("WERKZEUG_RUN_MAIN") == "true"
+
+
+def run_simple(
+    hostname: str,
+    port: int,
+    application: "WSGIApplication",
+    use_reloader: bool = False,
+    use_debugger: bool = False,
+    use_evalex: bool = True,
+    extra_files: t.Optional[t.Iterable[str]] = None,
+    exclude_patterns: t.Optional[t.Iterable[str]] = None,
+    reloader_interval: int = 1,
+    reloader_type: str = "auto",
+    threaded: bool = False,
+    processes: int = 1,
+    request_handler: t.Optional[t.Type[WSGIRequestHandler]] = None,
+    static_files: t.Optional[t.Dict[str, t.Union[str, t.Tuple[str, str]]]] = None,
+    passthrough_errors: bool = False,
+    ssl_context: t.Optional[_TSSLContextArg] = None,
+) -> None:
+    """Start a WSGI application. Optional features include a reloader,
+    multithreading and fork support.
+
+    This function has a command-line interface too::
+
+        python -m werkzeug.serving --help
+
+    .. versionchanged:: 2.0
+        Added ``exclude_patterns`` parameter.
+
+    .. versionadded:: 0.5
+       `static_files` was added to simplify serving of static files as well
+       as `passthrough_errors`.
+
+    .. versionadded:: 0.6
+       support for SSL was added.
+
+    .. versionadded:: 0.8
+       Added support for automatically loading a SSL context from certificate
+       file and private key.
+
+    .. versionadded:: 0.9
+       Added command-line interface.
+
+    .. versionadded:: 0.10
+       Improved the reloader and added support for changing the backend
+       through the `reloader_type` parameter.  See :ref:`reloader`
+       for more information.
+
+    .. versionchanged:: 0.15
+        Bind to a Unix socket by passing a path that starts with
+        ``unix://`` as the ``hostname``.
+
+    :param hostname: The host to bind to, for example ``'localhost'``.
+        If the value is a path that starts with ``unix://`` it will bind
+        to a Unix socket instead of a TCP socket.
+    :param port: The port for the server, e.g. ``8080``.
+    :param application: the WSGI application to execute
+    :param use_reloader: should the server automatically restart the python
+                         process if modules were changed?
+    :param use_debugger: should the werkzeug debugging system be used?
+    :param use_evalex: should the exception evaluation feature be enabled?
+    :param extra_files: a list of files the reloader should watch
+                        additionally to the modules.  For example configuration
+                        files.
+    :param exclude_patterns: List of :mod:`fnmatch` patterns to ignore
+        when running the reloader. For example, ignore cache files that
+        shouldn't reload when updated.
+    :param reloader_interval: the interval for the reloader in seconds.
+    :param reloader_type: the type of reloader to use.  The default is
+                          auto detection.  Valid values are ``'stat'`` and
+                          ``'watchdog'``. See :ref:`reloader` for more
+                          information.
+    :param threaded: should the process handle each request in a separate
+                     thread?
+    :param processes: if greater than 1 then handle each request in a new process
+                      up to this maximum number of concurrent processes.
+    :param request_handler: optional parameter that can be used to replace
+                            the default one.  You can use this to replace it
+                            with a different
+                            :class:`~BaseHTTPServer.BaseHTTPRequestHandler`
+                            subclass.
+    :param static_files: a list or dict of paths for static files.  This works
+                         exactly like :class:`SharedDataMiddleware`, it's actually
+                         just wrapping the application in that middleware before
+                         serving.
+    :param passthrough_errors: set this to `True` to disable the error catching.
+                               This means that the server will die on errors but
+                               it can be useful to hook debuggers in (pdb etc.)
+    :param ssl_context: an SSL context for the connection. Either an
+                        :class:`ssl.SSLContext`, a tuple in the form
+                        ``(cert_file, pkey_file)``, the string ``'adhoc'`` if
+                        the server should automatically create one, or ``None``
+                        to disable SSL (which is the default).
+    """
+    if not isinstance(port, int):
+        raise TypeError("port must be an integer")
+    if use_debugger:
+        from .debug import DebuggedApplication
+
+        application = DebuggedApplication(application, use_evalex)
+    if static_files:
+        from .middleware.shared_data import SharedDataMiddleware
+
+        application = SharedDataMiddleware(application, static_files)
+
+    def log_startup(sock: socket.socket) -> None:
+        all_addresses_message = (
+            " * Running on all addresses.\n"
+            "   WARNING: This is a development server. Do not use it in"
+            " a production deployment."
+        )
+
+        if sock.family == af_unix:
+            _log("info", " * Running on %s (Press CTRL+C to quit)", hostname)
+        else:
+            if hostname == "0.0.0.0":
+                _log("warning", all_addresses_message)
+                display_hostname = get_interface_ip(socket.AF_INET)
+            elif hostname == "::":
+                _log("warning", all_addresses_message)
+                display_hostname = get_interface_ip(socket.AF_INET6)
+            else:
+                display_hostname = hostname
+
+            if ":" in display_hostname:
+                display_hostname = f"[{display_hostname}]"
+
+            _log(
+                "info",
+                " * Running on %s://%s:%d/ (Press CTRL+C to quit)",
+                "http" if ssl_context is None else "https",
+                display_hostname,
+                sock.getsockname()[1],
+            )
+
+    def inner() -> None:
+        try:
+            fd: t.Optional[int] = int(os.environ["WERKZEUG_SERVER_FD"])
+        except (LookupError, ValueError):
+            fd = None
+        srv = make_server(
+            hostname,
+            port,
+            application,
+            threaded,
+            processes,
+            request_handler,
+            passthrough_errors,
+            ssl_context,
+            fd=fd,
+        )
+        if fd is None:
+            log_startup(srv.socket)
+        srv.serve_forever()
+
+    if use_reloader:
+        # If we're not running already in the subprocess that is the
+        # reloader we want to open up a socket early to make sure the
+        # port is actually available.
+        if not is_running_from_reloader():
+            if port == 0 and not can_open_by_fd:
+                raise ValueError(
+                    "Cannot bind to a random port with enabled "
+                    "reloader if the Python interpreter does "
+                    "not support socket opening by fd."
+                )
+
+            # Create and destroy a socket so that any exceptions are
+            # raised before we spawn a separate Python interpreter and
+            # lose this ability.
+            address_family = select_address_family(hostname, port)
+            server_address = get_sockaddr(hostname, port, address_family)
+            s = socket.socket(address_family, socket.SOCK_STREAM)
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+            s.bind(server_address)
+            s.set_inheritable(True)
+
+            # If we can open the socket by file descriptor, then we can just
+            # reuse this one and our socket will survive the restarts.
+            if can_open_by_fd:
+                os.environ["WERKZEUG_SERVER_FD"] = str(s.fileno())
+                s.listen(LISTEN_QUEUE)
+                log_startup(s)
+            else:
+                s.close()
+                if address_family == af_unix:
+                    server_address = t.cast(str, server_address)
+                    _log("info", "Unlinking %s", server_address)
+                    os.unlink(server_address)
+
+        from ._reloader import run_with_reloader as _rwr
+
+        _rwr(
+            inner,
+            extra_files=extra_files,
+            exclude_patterns=exclude_patterns,
+            interval=reloader_interval,
+            reloader_type=reloader_type,
+        )
+    else:
+        inner()
+
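+# A minimal usage sketch (``demo_app`` is a hypothetical application, not
+# part of this module):
+#
+#     def demo_app(environ, start_response):
+#         start_response("200 OK", [("Content-Type", "text/plain")])
+#         return [b"Hello World!"]
+#
+#     run_simple("localhost", 5000, demo_app, use_reloader=True)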
+
+def run_with_reloader(*args: t.Any, **kwargs: t.Any) -> None:
+    """Run a process with the reloader. This is not a public API, do
+    not use this function.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1.
+    """
+    from ._reloader import run_with_reloader as _rwr
+
+    warnings.warn(
+        (
+            "'run_with_reloader' is a private API, it will no longer be"
+            " accessible in Werkzeug 2.1. Use 'run_simple' instead."
+        ),
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    _rwr(*args, **kwargs)
+
+
+def main() -> None:
+    """A simple command-line interface for :py:func:`run_simple`."""
+    import argparse
+    from .utils import import_string
+
+    _log("warning", "This CLI is deprecated and will be removed in version 2.1.")
+
+    parser = argparse.ArgumentParser(
+        description="Run the given WSGI application with the development server.",
+        allow_abbrev=False,
+    )
+    parser.add_argument(
+        "-b",
+        "--bind",
+        dest="address",
+        help="The hostname:port the app should listen on.",
+    )
+    parser.add_argument(
+        "-d",
+        "--debug",
+        action="store_true",
+        help="Show the interactive debugger for unhandled exceptions.",
+    )
+    parser.add_argument(
+        "-r",
+        "--reload",
+        action="store_true",
+        help="Reload the process if modules change.",
+    )
+    parser.add_argument(
+        "application", help="Application to import and serve, in the form module:app."
+    )
+    args = parser.parse_args()
+    hostname, port = None, None
+
+    if args.address:
+        hostname, _, port = args.address.partition(":")
+
+    run_simple(
+        hostname=hostname or "127.0.0.1",
+        port=int(port or 5000),
+        application=import_string(args.application),
+        use_reloader=args.reload,
+        use_debugger=args.debug,
+    )
+
+
+if __name__ == "__main__":
+    main()
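+
+# A typical invocation of the (deprecated) CLI above, with a hypothetical
+# ``module:app`` target:
+#
+#     python -m werkzeug.serving -b localhost:8000 -r -d myproject:app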
diff --git a/venv/lib/python3.7/site-packages/werkzeug/test.py b/venv/lib/python3.7/site-packages/werkzeug/test.py
new file mode 100644
index 00000000..9301c02f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/test.py
@@ -0,0 +1,1324 @@
+import mimetypes
+import sys
+import typing as t
+import warnings
+from collections import defaultdict
+from datetime import datetime
+from datetime import timedelta
+from http.cookiejar import CookieJar
+from io import BytesIO
+from itertools import chain
+from random import random
+from tempfile import TemporaryFile
+from time import time
+from urllib.request import Request as _UrllibRequest
+
+from ._internal import _get_environ
+from ._internal import _make_encode_wrapper
+from ._internal import _wsgi_decoding_dance
+from ._internal import _wsgi_encoding_dance
+from .datastructures import Authorization
+from .datastructures import CallbackDict
+from .datastructures import CombinedMultiDict
+from .datastructures import EnvironHeaders
+from .datastructures import FileMultiDict
+from .datastructures import Headers
+from .datastructures import MultiDict
+from .http import dump_cookie
+from .http import dump_options_header
+from .http import parse_options_header
+from .sansio.multipart import Data
+from .sansio.multipart import Epilogue
+from .sansio.multipart import Field
+from .sansio.multipart import File
+from .sansio.multipart import MultipartEncoder
+from .sansio.multipart import Preamble
+from .urls import iri_to_uri
+from .urls import url_encode
+from .urls import url_fix
+from .urls import url_parse
+from .urls import url_unparse
+from .urls import url_unquote
+from .utils import get_content_type
+from .wrappers.request import Request
+from .wrappers.response import Response
+from .wsgi import ClosingIterator
+from .wsgi import get_current_url
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+def stream_encode_multipart(
+    data: t.Mapping[str, t.Any],
+    use_tempfile: bool = True,
+    threshold: int = 1024 * 500,
+    boundary: t.Optional[str] = None,
+    charset: str = "utf-8",
+) -> t.Tuple[t.BinaryIO, int, str]:
+    """Encode a dict of values (either strings or file descriptors or
+    :class:`FileStorage` objects.) into a multipart encoded string stored
+    in a file descriptor.
+    """
+    if boundary is None:
+        boundary = f"---------------WerkzeugFormPart_{time()}{random()}"
+
+    stream: t.BinaryIO = BytesIO()
+    total_length = 0
+    on_disk = False
+
+    if use_tempfile:
+
+        def write_binary(s: bytes) -> int:
+            nonlocal stream, total_length, on_disk
+
+            if on_disk:
+                return stream.write(s)
+            else:
+                length = len(s)
+
+                if length + total_length <= threshold:
+                    stream.write(s)
+                else:
+                    new_stream = t.cast(t.BinaryIO, TemporaryFile("wb+"))
+                    new_stream.write(stream.getvalue())  # type: ignore
+                    new_stream.write(s)
+                    stream = new_stream
+                    on_disk = True
+
+                total_length += length
+                return length
+
+    else:
+        write_binary = stream.write
+
+    encoder = MultipartEncoder(boundary.encode())
+    write_binary(encoder.send_event(Preamble(data=b"")))
+    for key, value in _iter_data(data):
+        reader = getattr(value, "read", None)
+        if reader is not None:
+            filename = getattr(value, "filename", getattr(value, "name", None))
+            content_type = getattr(value, "content_type", None)
+            if content_type is None:
+                content_type = (
+                    filename
+                    and mimetypes.guess_type(filename)[0]
+                    or "application/octet-stream"
+                )
+            headers = Headers([("Content-Type", content_type)])
+            if filename is None:
+                write_binary(encoder.send_event(Field(name=key, headers=headers)))
+            else:
+                write_binary(
+                    encoder.send_event(
+                        File(name=key, filename=filename, headers=headers)
+                    )
+                )
+            while True:
+                chunk = reader(16384)
+
+                if not chunk:
+                    break
+
+                write_binary(encoder.send_event(Data(data=chunk, more_data=True)))
+        else:
+            if not isinstance(value, str):
+                value = str(value)
+            write_binary(encoder.send_event(Field(name=key, headers=Headers())))
+            write_binary(
+                encoder.send_event(Data(data=value.encode(charset), more_data=False))
+            )
+
+    write_binary(encoder.send_event(Epilogue(data=b"")))
+
+    length = stream.tell()
+    stream.seek(0)
+    return stream, length, boundary
+
+
+def encode_multipart(
+    values: t.Mapping[str, t.Any],
+    boundary: t.Optional[str] = None,
+    charset: str = "utf-8",
+) -> t.Tuple[str, bytes]:
+    """Like `stream_encode_multipart` but returns a tuple in the form
+    (``boundary``, ``data``) where data is bytes.
+    """
+    stream, length, boundary = stream_encode_multipart(
+        values, use_tempfile=False, boundary=boundary, charset=charset
+    )
+    return boundary, stream.read()
+
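+# A small sketch of both helpers (field names and values illustrative); file
+# values must be file-like objects with a ``read`` method:
+#
+#     boundary, body = encode_multipart(
+#         {"name": "example", "file": BytesIO(b"abc")}
+#     )
+#     # body is the complete multipart payload as bytes
+#
+#     stream, length, boundary = stream_encode_multipart({"name": "example"})
+#     # stream is a binary file-like object rewound to position 0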
+
+class _TestCookieHeaders:
+    """A headers adapter for cookielib"""
+
+    def __init__(self, headers: t.Union[Headers, t.List[t.Tuple[str, str]]]) -> None:
+        self.headers = headers
+
+    def getheaders(self, name: str) -> t.Iterable[str]:
+        headers = []
+        name = name.lower()
+        for k, v in self.headers:
+            if k.lower() == name:
+                headers.append(v)
+        return headers
+
+    def get_all(
+        self, name: str, default: t.Optional[t.Iterable[str]] = None
+    ) -> t.Iterable[str]:
+        headers = self.getheaders(name)
+
+        if not headers:
+            return default  # type: ignore
+
+        return headers
+
+
+class _TestCookieResponse:
+    """Something that looks like a httplib.HTTPResponse, but is actually just an
+    adapter for our test responses to make them available for cookielib.
+    """
+
+    def __init__(self, headers: t.Union[Headers, t.List[t.Tuple[str, str]]]) -> None:
+        self.headers = _TestCookieHeaders(headers)
+
+    def info(self) -> _TestCookieHeaders:
+        return self.headers
+
+
+class _TestCookieJar(CookieJar):
+    """A cookielib.CookieJar modified to inject and read cookie headers from
+    and to wsgi environments, and wsgi application responses.
+    """
+
+    def inject_wsgi(self, environ: "WSGIEnvironment") -> None:
+        """Inject the cookies as client headers into the server's wsgi
+        environment.
+        """
+        cvals = [f"{c.name}={c.value}" for c in self]
+
+        if cvals:
+            environ["HTTP_COOKIE"] = "; ".join(cvals)
+        else:
+            environ.pop("HTTP_COOKIE", None)
+
+    def extract_wsgi(
+        self,
+        environ: "WSGIEnvironment",
+        headers: t.Union[Headers, t.List[t.Tuple[str, str]]],
+    ) -> None:
+        """Extract the server's set-cookie headers as cookies into the
+        cookie jar.
+        """
+        self.extract_cookies(
+            _TestCookieResponse(headers),  # type: ignore
+            _UrllibRequest(get_current_url(environ)),
+        )
+
+
+def _iter_data(data: t.Mapping[str, t.Any]) -> t.Iterator[t.Tuple[str, t.Any]]:
+    """Iterate over a mapping that might have a list of values, yielding
+    all key, value pairs. Almost like iter_multi_items but only allows
+    lists, not tuples, of values so tuples can be used for files.
+    """
+    if isinstance(data, MultiDict):
+        yield from data.items(multi=True)
+    else:
+        for key, value in data.items():
+            if isinstance(value, list):
+                for v in value:
+                    yield key, v
+            else:
+                yield key, value
+
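+# For example (illustrative data), a plain dict with a list value expands
+# into repeated pairs, which is how repeated form fields are expressed:
+#
+#     list(_iter_data({"a": ["1", "2"], "b": "3"}))
+#     # -> [("a", "1"), ("a", "2"), ("b", "3")]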
+
+_TAnyMultiDict = t.TypeVar("_TAnyMultiDict", bound=MultiDict)
+
+
+class EnvironBuilder:
+    """This class can be used to conveniently create a WSGI environment
+    for testing purposes.  It can be used to quickly create WSGI environments
+    or request objects from arbitrary data.
+
+    The signature of this class is also used in some other places as of
+    Werkzeug 0.5 (:func:`create_environ`, :meth:`Response.from_values`,
+    :meth:`Client.open`).  Because of this most of the functionality is
+    available through the constructor alone.
+
+    Files and regular form data can be manipulated independently of each
+    other with the :attr:`form` and :attr:`files` attributes, but are
+    passed with the same argument to the constructor: `data`.
+
+    `data` can be any of these values:
+
+    -   a `str` or `bytes` object: The object is converted into an
+        :attr:`input_stream`, the :attr:`content_length` is set and you have to
+        provide a :attr:`content_type`.
+    -   a `dict` or :class:`MultiDict`: The keys have to be strings. The values
+        have to be either any of the following objects, or a list of any of the
+        following objects:
+
+        -   a :class:`file`-like object:  These are converted into
+            :class:`FileStorage` objects automatically.
+        -   a `tuple`:  The :meth:`~FileMultiDict.add_file` method is called
+            with the key and the unpacked `tuple` items as positional
+            arguments.
+        -   a `str`:  The string is set as form data for the associated key.
+    -   a file-like object: The object content is loaded in memory and then
+        handled like a regular `str` or a `bytes`.
+
+    :param path: the path of the request.  In the WSGI environment this will
+                 end up as `PATH_INFO`.  If the `query_string` is not defined
+                 and there is a question mark in the `path` everything after
+                 it is used as query string.
+    :param base_url: the base URL is a URL that is used to extract the WSGI
+                     URL scheme, host (server name + server port) and the
+                     script root (`SCRIPT_NAME`).
+    :param query_string: an optional string or dict with URL parameters.
+    :param method: the HTTP method to use, defaults to `GET`.
+    :param input_stream: an optional input stream.  Do not specify this and
+                         `data`.  As soon as an input stream is set you can't
+                         modify :attr:`args` and :attr:`files` unless you
+                         set the :attr:`input_stream` to `None` again.
+    :param content_type: The content type for the request.  As of 0.5 you
+                         don't have to provide this when specifying files
+                         and form data via `data`.
+    :param content_length: The content length for the request.  You don't
+                           have to specify this when providing data via
+                           `data`.
+    :param errors_stream: an optional error stream that is used for
+                          `wsgi.errors`.  Defaults to :data:`stderr`.
+    :param multithread: controls `wsgi.multithread`.  Defaults to `False`.
+    :param multiprocess: controls `wsgi.multiprocess`.  Defaults to `False`.
+    :param run_once: controls `wsgi.run_once`.  Defaults to `False`.
+    :param headers: an optional list or :class:`Headers` object of headers.
+    :param data: a string or dict of form data or a file-object.
+                 See explanation above.
+    :param json: An object to be serialized and assigned to ``data``.
+        Defaults the content type to ``"application/json"``.
+        Serialized with the function assigned to :attr:`json_dumps`.
+    :param environ_base: an optional dict of environment defaults.
+    :param environ_overrides: an optional dict of environment overrides.
+    :param charset: the charset used to encode string data.
+    :param auth: An authorization object to use for the
+        ``Authorization`` header value. A ``(username, password)`` tuple
+        is a shortcut for ``Basic`` authorization.
+
+    .. versionchanged:: 2.0
+        ``REQUEST_URI`` and ``RAW_URI`` are the full raw URI including
+        the query string, not only the path.
+
+    .. versionchanged:: 2.0
+        The default :attr:`request_class` is ``Request`` instead of
+        ``BaseRequest``.
+
+    .. versionadded:: 2.0
+       Added the ``auth`` parameter.
+
+    .. versionadded:: 0.15
+        The ``json`` param and :meth:`json_dumps` method.
+
+    .. versionadded:: 0.15
+        The environ has keys ``REQUEST_URI`` and ``RAW_URI`` containing
+        the path before percent-decoding. This is not part of the WSGI
+        PEP, but many WSGI servers include it.
+
+    .. versionchanged:: 0.6
+       ``path`` and ``base_url`` can now be unicode strings that are
+       encoded with :func:`iri_to_uri`.
+    """
+
+    #: the server protocol to use.  defaults to HTTP/1.1
+    server_protocol = "HTTP/1.1"
+
+    #: the wsgi version to use.  defaults to (1, 0)
+    wsgi_version = (1, 0)
+
+    #: The default request class used by :meth:`get_request`.
+    request_class = Request
+
+    import json
+
+    #: The serialization function used when ``json`` is passed.
+    json_dumps = staticmethod(json.dumps)
+    del json
+
+    _args: t.Optional[MultiDict]
+    _query_string: t.Optional[str]
+    _input_stream: t.Optional[t.BinaryIO]
+    _form: t.Optional[MultiDict]
+    _files: t.Optional[FileMultiDict]
+
+    def __init__(
+        self,
+        path: str = "/",
+        base_url: t.Optional[str] = None,
+        query_string: t.Optional[t.Union[t.Mapping[str, str], str]] = None,
+        method: str = "GET",
+        input_stream: t.Optional[t.BinaryIO] = None,
+        content_type: t.Optional[str] = None,
+        content_length: t.Optional[int] = None,
+        errors_stream: t.Optional[t.TextIO] = None,
+        multithread: bool = False,
+        multiprocess: bool = False,
+        run_once: bool = False,
+        headers: t.Optional[t.Union[Headers, t.Iterable[t.Tuple[str, str]]]] = None,
+        data: t.Optional[t.Union[t.BinaryIO, str, bytes, t.Mapping[str, t.Any]]] = None,
+        environ_base: t.Optional[t.Mapping[str, t.Any]] = None,
+        environ_overrides: t.Optional[t.Mapping[str, t.Any]] = None,
+        charset: str = "utf-8",
+        mimetype: t.Optional[str] = None,
+        json: t.Optional[t.Mapping[str, t.Any]] = None,
+        auth: t.Optional[t.Union[Authorization, t.Tuple[str, str]]] = None,
+    ) -> None:
+        path_s = _make_encode_wrapper(path)
+        if query_string is not None and path_s("?") in path:
+            raise ValueError("Query string is defined in the path and as an argument")
+        request_uri = url_parse(path)
+        if query_string is None and path_s("?") in path:
+            query_string = request_uri.query
+        self.charset = charset
+        self.path = iri_to_uri(request_uri.path)
+        self.request_uri = path
+        if base_url is not None:
+            base_url = url_fix(iri_to_uri(base_url, charset), charset)
+        self.base_url = base_url  # type: ignore
+        if isinstance(query_string, (bytes, str)):
+            self.query_string = query_string
+        else:
+            if query_string is None:
+                query_string = MultiDict()
+            elif not isinstance(query_string, MultiDict):
+                query_string = MultiDict(query_string)
+            self.args = query_string
+        self.method = method
+        if headers is None:
+            headers = Headers()
+        elif not isinstance(headers, Headers):
+            headers = Headers(headers)
+        self.headers = headers
+        if content_type is not None:
+            self.content_type = content_type
+        if errors_stream is None:
+            errors_stream = sys.stderr
+        self.errors_stream = errors_stream
+        self.multithread = multithread
+        self.multiprocess = multiprocess
+        self.run_once = run_once
+        self.environ_base = environ_base
+        self.environ_overrides = environ_overrides
+        self.input_stream = input_stream
+        self.content_length = content_length
+        self.closed = False
+
+        if auth is not None:
+            if isinstance(auth, tuple):
+                auth = Authorization(
+                    "basic", {"username": auth[0], "password": auth[1]}
+                )
+
+            self.headers.set("Authorization", auth.to_header())
+
+        if json is not None:
+            if data is not None:
+                raise TypeError("can't provide both json and data")
+
+            data = self.json_dumps(json)
+
+            if self.content_type is None:
+                self.content_type = "application/json"
+
+        if data:
+            if input_stream is not None:
+                raise TypeError("can't provide input stream and data")
+            if hasattr(data, "read"):
+                data = data.read()  # type: ignore
+            if isinstance(data, str):
+                data = data.encode(self.charset)
+            if isinstance(data, bytes):
+                self.input_stream = BytesIO(data)
+                if self.content_length is None:
+                    self.content_length = len(data)
+            else:
+                for key, value in _iter_data(data):  # type: ignore
+                    if isinstance(value, (tuple, dict)) or hasattr(value, "read"):
+                        self._add_file_from_data(key, value)
+                    else:
+                        self.form.setlistdefault(key).append(value)
+
+        if mimetype is not None:
+            self.mimetype = mimetype
+
+    @classmethod
+    def from_environ(
+        cls, environ: "WSGIEnvironment", **kwargs: t.Any
+    ) -> "EnvironBuilder":
+        """Turn an environ dict back into a builder. Any extra kwargs
+        override the args extracted from the environ.
+
+        .. versionchanged:: 2.0
+            Path and query values are passed through the WSGI decoding
+            dance to avoid double encoding.
+
+        .. versionadded:: 0.15
+        """
+        headers = Headers(EnvironHeaders(environ))
+        out = {
+            "path": _wsgi_decoding_dance(environ["PATH_INFO"]),
+            "base_url": cls._make_base_url(
+                environ["wsgi.url_scheme"],
+                headers.pop("Host"),
+                _wsgi_decoding_dance(environ["SCRIPT_NAME"]),
+            ),
+            "query_string": _wsgi_decoding_dance(environ["QUERY_STRING"]),
+            "method": environ["REQUEST_METHOD"],
+            "input_stream": environ["wsgi.input"],
+            "content_type": headers.pop("Content-Type", None),
+            "content_length": headers.pop("Content-Length", None),
+            "errors_stream": environ["wsgi.errors"],
+            "multithread": environ["wsgi.multithread"],
+            "multiprocess": environ["wsgi.multiprocess"],
+            "run_once": environ["wsgi.run_once"],
+            "headers": headers,
+        }
+        out.update(kwargs)
+        return cls(**out)
+
+    def _add_file_from_data(
+        self,
+        key: str,
+        value: t.Union[
+            t.BinaryIO, t.Tuple[t.BinaryIO, str], t.Tuple[t.BinaryIO, str, str]
+        ],
+    ) -> None:
+        """Called in the EnvironBuilder to add files from the data dict."""
+        if isinstance(value, tuple):
+            self.files.add_file(key, *value)
+        else:
+            self.files.add_file(key, value)
+
+    @staticmethod
+    def _make_base_url(scheme: str, host: str, script_root: str) -> str:
+        return url_unparse((scheme, host, script_root, "", "")).rstrip("/") + "/"
+
+    @property
+    def base_url(self) -> str:
+        """The base URL is used to extract the URL scheme, host name,
+        port, and root path.
+        """
+        return self._make_base_url(self.url_scheme, self.host, self.script_root)
+
+    @base_url.setter
+    def base_url(self, value: t.Optional[str]) -> None:
+        if value is None:
+            scheme = "http"
+            netloc = "localhost"
+            script_root = ""
+        else:
+            scheme, netloc, script_root, qs, anchor = url_parse(value)
+            if qs or anchor:
+                raise ValueError("base url must not contain a query string or fragment")
+        self.script_root = script_root.rstrip("/")
+        self.host = netloc
+        self.url_scheme = scheme
+
+    @property
+    def content_type(self) -> t.Optional[str]:
+        """The content type for the request.  Reflected from and to
+        the :attr:`headers`.  Do not set if you set :attr:`files` or
+        :attr:`form` for auto detection.
+        """
+        ct = self.headers.get("Content-Type")
+        if ct is None and not self._input_stream:
+            if self._files:
+                return "multipart/form-data"
+            if self._form:
+                return "application/x-www-form-urlencoded"
+            return None
+        return ct
+
+    @content_type.setter
+    def content_type(self, value: t.Optional[str]) -> None:
+        if value is None:
+            self.headers.pop("Content-Type", None)
+        else:
+            self.headers["Content-Type"] = value
+
+    @property
+    def mimetype(self) -> t.Optional[str]:
+        """The mimetype (content type without charset etc.)
+
+        .. versionadded:: 0.14
+        """
+        ct = self.content_type
+        return ct.split(";")[0].strip() if ct else None
+
+    @mimetype.setter
+    def mimetype(self, value: str) -> None:
+        self.content_type = get_content_type(value, self.charset)
+
+    @property
+    def mimetype_params(self) -> t.Mapping[str, str]:
+        """The mimetype parameters as dict.  For example if the
+        content type is ``text/html; charset=utf-8`` the params would be
+        ``{'charset': 'utf-8'}``.
+
+        .. versionadded:: 0.14
+        """
+
+        def on_update(d: t.Mapping[str, str]) -> None:
+            self.headers["Content-Type"] = dump_options_header(self.mimetype, d)
+
+        d = parse_options_header(self.headers.get("content-type", ""))[1]
+        return CallbackDict(d, on_update)
+
+    @property
+    def content_length(self) -> t.Optional[int]:
+        """The content length as integer.  Reflected from and to the
+        :attr:`headers`.  Do not set if you set :attr:`files` or
+        :attr:`form` for auto detection.
+        """
+        return self.headers.get("Content-Length", type=int)
+
+    @content_length.setter
+    def content_length(self, value: t.Optional[int]) -> None:
+        if value is None:
+            self.headers.pop("Content-Length", None)
+        else:
+            self.headers["Content-Length"] = str(value)
+
+    def _get_form(self, name: str, storage: t.Type[_TAnyMultiDict]) -> _TAnyMultiDict:
+        """Common behavior for getting the :attr:`form` and
+        :attr:`files` properties.
+
+        :param name: Name of the internal cached attribute.
+        :param storage: Storage class used for the data.
+        """
+        if self.input_stream is not None:
+            raise AttributeError("an input stream is defined")
+
+        rv = getattr(self, name)
+
+        if rv is None:
+            rv = storage()
+            setattr(self, name, rv)
+
+        return rv  # type: ignore
+
+    def _set_form(self, name: str, value: MultiDict) -> None:
+        """Common behavior for setting the :attr:`form` and
+        :attr:`files` properties.
+
+        :param name: Name of the internal cached attribute.
+        :param value: Value to assign to the attribute.
+        """
+        self._input_stream = None
+        setattr(self, name, value)
+
+    @property
+    def form(self) -> MultiDict:
+        """A :class:`MultiDict` of form values."""
+        return self._get_form("_form", MultiDict)
+
+    @form.setter
+    def form(self, value: MultiDict) -> None:
+        self._set_form("_form", value)
+
+    @property
+    def files(self) -> FileMultiDict:
+        """A :class:`FileMultiDict` of uploaded files. Use
+        :meth:`~FileMultiDict.add_file` to add new files.
+        """
+        return self._get_form("_files", FileMultiDict)
+
+    @files.setter
+    def files(self, value: FileMultiDict) -> None:
+        self._set_form("_files", value)
+
+    @property
+    def input_stream(self) -> t.Optional[t.BinaryIO]:
+        """An optional input stream. This is mutually exclusive with
+        setting :attr:`form` and :attr:`files`, setting it will clear
+        those. Do not provide this if the method is not ``POST`` or
+        another method that has a body.
+        """
+        return self._input_stream
+
+    @input_stream.setter
+    def input_stream(self, value: t.Optional[t.BinaryIO]) -> None:
+        self._input_stream = value
+        self._form = None
+        self._files = None
+
+    @property
+    def query_string(self) -> str:
+        """The query string.  If you set this to a string
+        :attr:`args` will no longer be available.
+        """
+        if self._query_string is None:
+            if self._args is not None:
+                return url_encode(self._args, charset=self.charset)
+            return ""
+        return self._query_string
+
+    @query_string.setter
+    def query_string(self, value: t.Optional[str]) -> None:
+        self._query_string = value
+        self._args = None
+
+    @property
+    def args(self) -> MultiDict:
+        """The URL arguments as :class:`MultiDict`."""
+        if self._query_string is not None:
+            raise AttributeError("a query string is defined")
+        if self._args is None:
+            self._args = MultiDict()
+        return self._args
+
+    @args.setter
+    def args(self, value: t.Optional[MultiDict]) -> None:
+        self._query_string = None
+        self._args = value
+
+    @property
+    def server_name(self) -> str:
+        """The server name (read-only, use :attr:`host` to set)"""
+        return self.host.split(":", 1)[0]
+
+    @property
+    def server_port(self) -> int:
+        """The server port as integer (read-only, use :attr:`host` to set)"""
+        pieces = self.host.split(":", 1)
+        if len(pieces) == 2 and pieces[1].isdigit():
+            return int(pieces[1])
+        if self.url_scheme == "https":
+            return 443
+        return 80
+
+    def __del__(self) -> None:
+        try:
+            self.close()
+        except Exception:
+            pass
+
+    def close(self) -> None:
+        """Closes all files.  If you put real :class:`file` objects into the
+        :attr:`files` dict you can call this method to automatically close
+        them all in one go.
+        """
+        if self.closed:
+            return
+        try:
+            files = self.files.values()
+        except AttributeError:
+            files = ()  # type: ignore
+        for f in files:
+            try:
+                f.close()
+            except Exception:
+                pass
+        self.closed = True
+
+    def get_environ(self) -> "WSGIEnvironment":
+        """Return the built environ.
+
+        .. versionchanged:: 0.15
+            The content type and length headers are set based on
+            input stream detection. Previously this only set the WSGI
+            keys.
+        """
+        input_stream = self.input_stream
+        content_length = self.content_length
+
+        mimetype = self.mimetype
+        content_type = self.content_type
+
+        if input_stream is not None:
+            start_pos = input_stream.tell()
+            input_stream.seek(0, 2)
+            end_pos = input_stream.tell()
+            input_stream.seek(start_pos)
+            content_length = end_pos - start_pos
+        elif mimetype == "multipart/form-data":
+            input_stream, content_length, boundary = stream_encode_multipart(
+                CombinedMultiDict([self.form, self.files]), charset=self.charset
+            )
+            content_type = f'{mimetype}; boundary="{boundary}"'
+        elif mimetype == "application/x-www-form-urlencoded":
+            form_encoded = url_encode(self.form, charset=self.charset).encode("ascii")
+            content_length = len(form_encoded)
+            input_stream = BytesIO(form_encoded)
+        else:
+            input_stream = BytesIO()
+
+        result: "WSGIEnvironment" = {}
+        if self.environ_base:
+            result.update(self.environ_base)
+
+        def _path_encode(x: str) -> str:
+            return _wsgi_encoding_dance(url_unquote(x, self.charset), self.charset)
+
+        raw_uri = _wsgi_encoding_dance(self.request_uri, self.charset)
+        result.update(
+            {
+                "REQUEST_METHOD": self.method,
+                "SCRIPT_NAME": _path_encode(self.script_root),
+                "PATH_INFO": _path_encode(self.path),
+                "QUERY_STRING": _wsgi_encoding_dance(self.query_string, self.charset),
+                # Non-standard, added by mod_wsgi, uWSGI
+                "REQUEST_URI": raw_uri,
+                # Non-standard, added by gunicorn
+                "RAW_URI": raw_uri,
+                "SERVER_NAME": self.server_name,
+                "SERVER_PORT": str(self.server_port),
+                "HTTP_HOST": self.host,
+                "SERVER_PROTOCOL": self.server_protocol,
+                "wsgi.version": self.wsgi_version,
+                "wsgi.url_scheme": self.url_scheme,
+                "wsgi.input": input_stream,
+                "wsgi.errors": self.errors_stream,
+                "wsgi.multithread": self.multithread,
+                "wsgi.multiprocess": self.multiprocess,
+                "wsgi.run_once": self.run_once,
+            }
+        )
+
+        headers = self.headers.copy()
+
+        if content_type is not None:
+            result["CONTENT_TYPE"] = content_type
+            headers.set("Content-Type", content_type)
+
+        if content_length is not None:
+            result["CONTENT_LENGTH"] = str(content_length)
+            headers.set("Content-Length", content_length)
+
+        combined_headers = defaultdict(list)
+
+        for key, value in headers.to_wsgi_list():
+            combined_headers[f"HTTP_{key.upper().replace('-', '_')}"].append(value)
+
+        for key, values in combined_headers.items():
+            result[key] = ", ".join(values)
+
+        if self.environ_overrides:
+            result.update(self.environ_overrides)
+
+        return result
+
+    def get_request(self, cls: t.Optional[t.Type[Request]] = None) -> Request:
+        """Returns a request with the data.  If the request class is not
+        specified :attr:`request_class` is used.
+
+        :param cls: The request wrapper to use.
+        """
+        if cls is None:
+            cls = self.request_class
+
+        return cls(self.get_environ())
+
+
+class ClientRedirectError(Exception):
+    """If a redirect loop is detected when using follow_redirects=True with
+    the :cls:`Client`, then this exception is raised.
+    """
+
+
+class Client:
+    """This class allows you to send requests to a wrapped application.
+
+    The use_cookies parameter indicates whether cookies should be stored and
+    sent for subsequent requests. This is True by default, but passing False
+    will disable this behaviour.
+
+    If you want to request some subdomain of your application you may
+    set `allow_subdomain_redirects` to `True`, as otherwise no external
+    redirects are allowed.
+
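+    A minimal usage sketch; ``app`` stands for whatever WSGI callable
+    you want to test, and the status shown assumes that application
+    responds successfully:
+
+    >>> client = Client(app)
+    >>> response = client.get("/")
+    >>> response.status
+    '200 OK'
+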
+    .. versionchanged:: 2.0
+        ``response_wrapper`` is always a subclass of
+        :class:`TestResponse`.
+
+    .. versionchanged:: 0.5
+        Added the ``use_cookies`` parameter.
+    """
+
+    def __init__(
+        self,
+        application: "WSGIApplication",
+        response_wrapper: t.Optional[t.Type["Response"]] = None,
+        use_cookies: bool = True,
+        allow_subdomain_redirects: bool = False,
+    ) -> None:
+        self.application = application
+
+        if response_wrapper in {None, Response}:
+            response_wrapper = TestResponse
+        # response_wrapper is a class, so check subclassing, not instance.
+        elif not issubclass(response_wrapper, TestResponse):
+            response_wrapper = type(
+                "WrapperTestResponse",
+                (TestResponse, response_wrapper),  # type: ignore
+                {},
+            )
+
+        self.response_wrapper = t.cast(t.Type["TestResponse"], response_wrapper)
+
+        if use_cookies:
+            self.cookie_jar: t.Optional[_TestCookieJar] = _TestCookieJar()
+        else:
+            self.cookie_jar = None
+
+        self.allow_subdomain_redirects = allow_subdomain_redirects
+
+    def set_cookie(
+        self,
+        server_name: str,
+        key: str,
+        value: str = "",
+        max_age: t.Optional[t.Union[timedelta, int]] = None,
+        expires: t.Optional[t.Union[str, datetime, int, float]] = None,
+        path: str = "/",
+        domain: t.Optional[str] = None,
+        secure: bool = False,
+        httponly: bool = False,
+        samesite: t.Optional[str] = None,
+        charset: str = "utf-8",
+    ) -> None:
+        """Sets a cookie in the client's cookie jar.  The server name
+        is required and has to match the one that is also passed to
+        the open call.
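+
+        For example; the server name must match the one later passed to
+        :meth:`open`, and the key and value are illustrative::
+
+            client.set_cookie("localhost", "session", "abc123")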
+        """
+        assert self.cookie_jar is not None, "cookies disabled"
+        header = dump_cookie(
+            key,
+            value,
+            max_age,
+            expires,
+            path,
+            domain,
+            secure,
+            httponly,
+            charset,
+            samesite=samesite,
+        )
+        environ = create_environ(path, base_url=f"http://{server_name}")
+        headers = [("Set-Cookie", header)]
+        self.cookie_jar.extract_wsgi(environ, headers)
+
+    def delete_cookie(
+        self,
+        server_name: str,
+        key: str,
+        path: str = "/",
+        domain: t.Optional[str] = None,
+        secure: bool = False,
+        httponly: bool = False,
+        samesite: t.Optional[str] = None,
+    ) -> None:
+        """Deletes a cookie in the test client."""
+        self.set_cookie(
+            server_name,
+            key,
+            expires=0,
+            max_age=0,
+            path=path,
+            domain=domain,
+            secure=secure,
+            httponly=httponly,
+            samesite=samesite,
+        )
+
+    def run_wsgi_app(
+        self, environ: "WSGIEnvironment", buffered: bool = False
+    ) -> t.Tuple[t.Iterable[bytes], str, Headers]:
+        """Runs the wrapped WSGI app with the given environment.
+
+        :meta private:
+        """
+        if self.cookie_jar is not None:
+            self.cookie_jar.inject_wsgi(environ)
+
+        rv = run_wsgi_app(self.application, environ, buffered=buffered)
+
+        if self.cookie_jar is not None:
+            self.cookie_jar.extract_wsgi(environ, rv[2])
+
+        return rv
+
+    def resolve_redirect(
+        self, response: "TestResponse", buffered: bool = False
+    ) -> "TestResponse":
+        """Perform a new request to the location given by the redirect
+        response to the previous request.
+
+        :meta private:
+        """
+        scheme, netloc, path, qs, anchor = url_parse(response.location)
+        builder = EnvironBuilder.from_environ(response.request.environ, query_string=qs)
+
+        to_name_parts = netloc.split(":", 1)[0].split(".")
+        from_name_parts = builder.server_name.split(".")
+
+        if to_name_parts != [""]:
+            # The new location has a host, use it for the base URL.
+            builder.url_scheme = scheme
+            builder.host = netloc
+        else:
+            # A local redirect with autocorrect_location_header=False
+            # doesn't have a host, so use the request's host.
+            to_name_parts = from_name_parts
+
+        # Explain why a redirect to a different server name won't be followed.
+        if to_name_parts != from_name_parts:
+            if to_name_parts[-len(from_name_parts) :] == from_name_parts:
+                if not self.allow_subdomain_redirects:
+                    raise RuntimeError("Following subdomain redirects is not enabled.")
+            else:
+                raise RuntimeError("Following external redirects is not supported.")
+
+        path_parts = path.split("/")
+        root_parts = builder.script_root.split("/")
+
+        if path_parts[: len(root_parts)] == root_parts:
+            # Strip the script root from the path.
+            builder.path = path[len(builder.script_root) :]
+        else:
+            # The new location is not under the script root, so use the
+            # whole path and clear the previous root.
+            builder.path = path
+            builder.script_root = ""
+
+        # Only 307 and 308 preserve all of the original request.
+        if response.status_code not in {307, 308}:
+            # HEAD is preserved, everything else becomes GET.
+            if builder.method != "HEAD":
+                builder.method = "GET"
+
+            # Clear the body and the headers that describe it.
+
+            if builder.input_stream is not None:
+                builder.input_stream.close()
+                builder.input_stream = None
+
+            builder.content_type = None
+            builder.content_length = None
+            builder.headers.pop("Transfer-Encoding", None)
+
+        return self.open(builder, buffered=buffered)
+
+    def open(
+        self,
+        *args: t.Any,
+        as_tuple: bool = False,
+        buffered: bool = False,
+        follow_redirects: bool = False,
+        **kwargs: t.Any,
+    ) -> "TestResponse":
+        """Generate an environ dict from the given arguments, make a
+        request to the application using it, and return the response.
+
+        :param args: Passed to :class:`EnvironBuilder` to create the
+            environ for the request. If a single arg is passed, it can
+            be an existing :class:`EnvironBuilder` or an environ dict.
+        :param buffered: Convert the iterator returned by the app into
+            a list. If the iterator has a ``close()`` method, it is
+            called automatically.
+        :param follow_redirects: Make additional requests to follow HTTP
+            redirects until a non-redirect status is returned.
+            :attr:`TestResponse.history` lists the intermediate
+            responses.
+
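+        A short sketch of a typical call; the path, the form data, and
+        the ``client`` name are illustrative only::
+
+            response = client.post(
+                "/login", data={"user": "a"}, follow_redirects=True
+            )
+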
+        .. versionchanged:: 2.0
+            ``as_tuple`` is deprecated and will be removed in Werkzeug
+            2.1. Use :attr:`TestResponse.request` and
+            ``request.environ`` instead.
+
+        .. versionchanged:: 2.0
+            The request input stream is closed when calling
+            ``response.close()``. Input streams for redirects are
+            automatically closed.
+
+        .. versionchanged:: 0.5
+            If a file is provided as a dict in the ``data`` parameter,
+            its content type has to be given under the key
+            ``content_type`` instead of ``mimetype``. This change was
+            made for consistency with :class:`werkzeug.FileWrapper`.
+
+        .. versionchanged:: 0.5
+            Added the ``follow_redirects`` parameter.
+        """
+        request: t.Optional["Request"] = None
+
+        if not kwargs and len(args) == 1:
+            arg = args[0]
+
+            if isinstance(arg, EnvironBuilder):
+                request = arg.get_request()
+            elif isinstance(arg, dict):
+                request = EnvironBuilder.from_environ(arg).get_request()
+            elif isinstance(arg, Request):
+                request = arg
+
+        if request is None:
+            builder = EnvironBuilder(*args, **kwargs)
+
+            try:
+                request = builder.get_request()
+            finally:
+                builder.close()
+
+        response = self.run_wsgi_app(request.environ, buffered=buffered)
+        response = self.response_wrapper(*response, request=request)
+
+        redirects = set()
+        history: t.List["TestResponse"] = []
+
+        while follow_redirects and response.status_code in {
+            301,
+            302,
+            303,
+            305,
+            307,
+            308,
+        }:
+            # Exhaust intermediate response bodies to ensure middleware
+            # that returns an iterator runs any cleanup code.
+            if not buffered:
+                response.make_sequence()
+                response.close()
+
+            new_redirect_entry = (response.location, response.status_code)
+
+            if new_redirect_entry in redirects:
+                raise ClientRedirectError(
+                    f"Loop detected: A {response.status_code} redirect"
+                    f" to {response.location} was already made."
+                )
+
+            redirects.add(new_redirect_entry)
+            response.history = tuple(history)
+            history.append(response)
+            response = self.resolve_redirect(response, buffered=buffered)
+        else:
+            # This is the final request after redirects, or not
+            # following redirects.
+            response.history = tuple(history)
+            # Close the input stream when closing the response, in case
+            # the input is an open temporary file.
+            response.call_on_close(request.input_stream.close)
+
+        if as_tuple:
+            warnings.warn(
+                "'as_tuple' is deprecated and will be removed in"
+                " Werkzeug 2.1. Access 'response.request.environ'"
+                " instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            return request.environ, response  # type: ignore
+
+        return response
+
+    def get(self, *args: t.Any, **kw: t.Any) -> "TestResponse":
+        """Call :meth:`open` with ``method`` set to ``GET``."""
+        kw["method"] = "GET"
+        return self.open(*args, **kw)
+
+    def post(self, *args: t.Any, **kw: t.Any) -> "TestResponse":
+        """Call :meth:`open` with ``method`` set to ``POST``."""
+        kw["method"] = "POST"
+        return self.open(*args, **kw)
+
+    def put(self, *args: t.Any, **kw: t.Any) -> "TestResponse":
+        """Call :meth:`open` with ``method`` set to ``PUT``."""
+        kw["method"] = "PUT"
+        return self.open(*args, **kw)
+
+    def delete(self, *args: t.Any, **kw: t.Any) -> "TestResponse":
+        """Call :meth:`open` with ``method`` set to ``DELETE``."""
+        kw["method"] = "DELETE"
+        return self.open(*args, **kw)
+
+    def patch(self, *args: t.Any, **kw: t.Any) -> "TestResponse":
+        """Call :meth:`open` with ``method`` set to ``PATCH``."""
+        kw["method"] = "PATCH"
+        return self.open(*args, **kw)
+
+    def options(self, *args: t.Any, **kw: t.Any) -> "TestResponse":
+        """Call :meth:`open` with ``method`` set to ``OPTIONS``."""
+        kw["method"] = "OPTIONS"
+        return self.open(*args, **kw)
+
+    def head(self, *args: t.Any, **kw: t.Any) -> "TestResponse":
+        """Call :meth:`open` with ``method`` set to ``HEAD``."""
+        kw["method"] = "HEAD"
+        return self.open(*args, **kw)
+
+    def trace(self, *args: t.Any, **kw: t.Any) -> "TestResponse":
+        """Call :meth:`open` with ``method`` set to ``TRACE``."""
+        kw["method"] = "TRACE"
+        return self.open(*args, **kw)
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self.application!r}>"
+
+
+def create_environ(*args: t.Any, **kwargs: t.Any) -> "WSGIEnvironment":
+    """Create a new WSGI environ dict based on the values passed.  The first
+    parameter should be the path of the request which defaults to '/'.  The
+    second one can either be an absolute path (in that case the host is
+    localhost:80) or a full path to the request with scheme, netloc port and
+    the path to the script.
+
+    This accepts the same arguments as the :class:`EnvironBuilder`
+    constructor.
+
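+    For example:
+
+    >>> create_environ('/foo', 'http://localhost:8080/')['PATH_INFO']
+    '/foo'
+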
+    .. versionchanged:: 0.5
+       This function is now a thin wrapper over :class:`EnvironBuilder` which
+       was added in 0.5.  The `headers`, `environ_base`, `environ_overrides`
+       and `charset` parameters were added.
+    """
+    builder = EnvironBuilder(*args, **kwargs)
+
+    try:
+        return builder.get_environ()
+    finally:
+        builder.close()
+
+
+def run_wsgi_app(
+    app: "WSGIApplication", environ: "WSGIEnvironment", buffered: bool = False
+) -> t.Tuple[t.Iterable[bytes], str, Headers]:
+    """Return a tuple in the form (app_iter, status, headers) of the
+    application output.  This works best if you pass it an application that
+    returns an iterator all the time.
+
+    Sometimes applications may use the `write()` callable returned
+    by the `start_response` function.  This tries to resolve such edge
+    cases automatically.  But if you don't get the expected output you
+    should set `buffered` to `True` which enforces buffering.
+
+    If passed an invalid WSGI application the behavior of this function is
+    undefined.  Never pass non-conforming WSGI applications to this function.
+
+    :param app: the application to execute.
+    :param buffered: set to `True` to enforce buffering.
+    :return: tuple in the form ``(app_iter, status, headers)``
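+
+    A minimal sketch; ``app`` stands for any conforming WSGI callable::
+
+        app_iter, status, headers = run_wsgi_app(app, create_environ("/"))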
+    """
+    # Copy environ to ensure any mutations by the app (ProxyFix, for
+    # example) don't affect subsequent requests (such as redirects).
+    environ = _get_environ(environ).copy()
+    status: str
+    response: t.Optional[t.Tuple[str, t.List[t.Tuple[str, str]]]] = None
+    buffer: t.List[bytes] = []
+
+    def start_response(status, headers, exc_info=None):  # type: ignore
+        nonlocal response
+
+        if exc_info:
+            try:
+                raise exc_info[1].with_traceback(exc_info[2])
+            finally:
+                exc_info = None
+
+        response = (status, headers)
+        return buffer.append
+
+    app_rv = app(environ, start_response)
+    close_func = getattr(app_rv, "close", None)
+    app_iter: t.Iterable[bytes] = iter(app_rv)
+
+    # when buffering we emit the close call early and convert the
+    # application iterator into a regular list
+    if buffered:
+        try:
+            app_iter = list(app_iter)
+        finally:
+            if close_func is not None:
+                close_func()
+
+    # otherwise we iterate the application iter until we have a response, chain
+    # the already received data with the already collected data and wrap it in
+    # a new `ClosingIterator` if we need to restore a `close` callable from the
+    # original return value.
+    else:
+        for item in app_iter:
+            buffer.append(item)
+
+            if response is not None:
+                break
+
+        if buffer:
+            app_iter = chain(buffer, app_iter)
+
+        if close_func is not None and app_iter is not app_rv:
+            app_iter = ClosingIterator(app_iter, close_func)
+
+    status, headers = response  # type: ignore
+    return app_iter, status, Headers(headers)
+
+
+class TestResponse(Response):
+    """:class:`~werkzeug.wrappers.Response` subclass that provides extra
+    information about requests made with the test :class:`Client`.
+
+    Test client requests will always return an instance of this class.
+    If a custom response class is passed to the client, it is
+    subclassed along with this to support test information.
+
+    If the test request included large files, or if the application is
+    serving a file, call :meth:`close` to close any open files and
+    prevent Python showing a ``ResourceWarning``.
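+
+    For example, assuming ``client`` is a :class:`Client` for an
+    application that redirects ``/old`` to ``/new``::
+
+        response = client.get("/old", follow_redirects=True)
+        assert response.request.path == "/new"
+        assert len(response.history) == 1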
+    """
+
+    request: Request
+    """A request object with the environ used to make the request that
+    resulted in this response.
+    """
+
+    history: t.Tuple["TestResponse", ...]
+    """A list of intermediate responses. Populated when the test request
+    is made with ``follow_redirects`` enabled.
+    """
+
+    def __init__(
+        self,
+        response: t.Iterable[bytes],
+        status: str,
+        headers: Headers,
+        request: Request,
+        history: t.Tuple["TestResponse"] = (),  # type: ignore
+        **kwargs: t.Any,
+    ) -> None:
+        super().__init__(response, status, headers, **kwargs)
+        self.request = request
+        self.history = history
+        self._compat_tuple = response, status, headers
+
+    def __iter__(self) -> t.Iterator:
+        warnings.warn(
+            (
+                "The test client no longer returns a tuple, it returns"
+                " a 'TestResponse'. Tuple unpacking is deprecated and"
+                " will be removed in Werkzeug 2.1. Access the"
+                " attributes 'data', 'status', and 'headers' instead."
+            ),
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return iter(self._compat_tuple)
+
+    def __getitem__(self, item: int) -> t.Any:
+        warnings.warn(
+            (
+                "The test client no longer returns a tuple, it returns"
+                " a 'TestResponse'. Item indexing is deprecated and"
+                " will be removed in Werkzeug 2.1. Access the"
+                " attributes 'data', 'status', and 'headers' instead."
+            ),
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self._compat_tuple[item]
diff --git a/venv/lib/python3.7/site-packages/werkzeug/testapp.py b/venv/lib/python3.7/site-packages/werkzeug/testapp.py
new file mode 100644
index 00000000..981f8878
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/testapp.py
@@ -0,0 +1,240 @@
+"""A small application that can be used to test a WSGI server and check
+it for WSGI compliance.
+"""
+import base64
+import os
+import sys
+import typing as t
+from html import escape
+from textwrap import wrap
+
+from . import __version__ as _werkzeug_version
+from .wrappers.request import Request
+from .wrappers.response import Response
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+logo = Response(
+    base64.b64decode(
+        """
+R0lGODlhoACgAOMIAAEDACwpAEpCAGdgAJaKAM28AOnVAP3rAP/////////
+//////////////////////yH5BAEKAAgALAAAAACgAKAAAAT+EMlJq704680R+F0ojmRpnuj0rWnrv
+nB8rbRs33gu0bzu/0AObxgsGn3D5HHJbCUFyqZ0ukkSDlAidctNFg7gbI9LZlrBaHGtzAae0eloe25
+7w9EDOX2fst/xenyCIn5/gFqDiVVDV4aGeYiKkhSFjnCQY5OTlZaXgZp8nJ2ekaB0SQOjqphrpnOiq
+ncEn65UsLGytLVmQ6m4sQazpbtLqL/HwpnER8bHyLrLOc3Oz8PRONPU1crXN9na263dMt/g4SzjMeX
+m5yDpLqgG7OzJ4u8lT/P69ej3JPn69kHzN2OIAHkB9RUYSFCFQYQJFTIkCDBiwoXWGnowaLEjRm7+G
+p9A7Hhx4rUkAUaSLJlxHMqVMD/aSycSZkyTplCqtGnRAM5NQ1Ly5OmzZc6gO4d6DGAUKA+hSocWYAo
+SlM6oUWX2O/o0KdaVU5vuSQLAa0ADwQgMEMB2AIECZhVSnTno6spgbtXmHcBUrQACcc2FrTrWS8wAf
+78cMFBgwIBgbN+qvTt3ayikRBk7BoyGAGABAdYyfdzRQGV3l4coxrqQ84GpUBmrdR3xNIDUPAKDBSA
+ADIGDhhqTZIWaDcrVX8EsbNzbkvCOxG8bN5w8ly9H8jyTJHC6DFndQydbguh2e/ctZJFXRxMAqqPVA
+tQH5E64SPr1f0zz7sQYjAHg0In+JQ11+N2B0XXBeeYZgBZFx4tqBToiTCPv0YBgQv8JqA6BEf6RhXx
+w1ENhRBnWV8ctEX4Ul2zc3aVGcQNC2KElyTDYyYUWvShdjDyMOGMuFjqnII45aogPhz/CodUHFwaDx
+lTgsaOjNyhGWJQd+lFoAGk8ObghI0kawg+EV5blH3dr+digkYuAGSaQZFHFz2P/cTaLmhF52QeSb45
+Jwxd+uSVGHlqOZpOeJpCFZ5J+rkAkFjQ0N1tah7JJSZUFNsrkeJUJMIBi8jyaEKIhKPomnC91Uo+NB
+yyaJ5umnnpInIFh4t6ZSpGaAVmizqjpByDegYl8tPE0phCYrhcMWSv+uAqHfgH88ak5UXZmlKLVJhd
+dj78s1Fxnzo6yUCrV6rrDOkluG+QzCAUTbCwf9SrmMLzK6p+OPHx7DF+bsfMRq7Ec61Av9i6GLw23r
+idnZ+/OO0a99pbIrJkproCQMA17OPG6suq3cca5ruDfXCCDoS7BEdvmJn5otdqscn+uogRHHXs8cbh
+EIfYaDY1AkrC0cqwcZpnM6ludx72x0p7Fo/hZAcpJDjax0UdHavMKAbiKltMWCF3xxh9k25N/Viud8
+ba78iCvUkt+V6BpwMlErmcgc502x+u1nSxJSJP9Mi52awD1V4yB/QHONsnU3L+A/zR4VL/indx/y64
+gqcj+qgTeweM86f0Qy1QVbvmWH1D9h+alqg254QD8HJXHvjQaGOqEqC22M54PcftZVKVSQG9jhkv7C
+JyTyDoAJfPdu8v7DRZAxsP/ky9MJ3OL36DJfCFPASC3/aXlfLOOON9vGZZHydGf8LnxYJuuVIbl83y
+Az5n/RPz07E+9+zw2A2ahz4HxHo9Kt79HTMx1Q7ma7zAzHgHqYH0SoZWyTuOLMiHwSfZDAQTn0ajk9
+YQqodnUYjByQZhZak9Wu4gYQsMyEpIOAOQKze8CmEF45KuAHTvIDOfHJNipwoHMuGHBnJElUoDmAyX
+c2Qm/R8Ah/iILCCJOEokGowdhDYc/yoL+vpRGwyVSCWFYZNljkhEirGXsalWcAgOdeAdoXcktF2udb
+qbUhjWyMQxYO01o6KYKOr6iK3fE4MaS+DsvBsGOBaMb0Y6IxADaJhFICaOLmiWTlDAnY1KzDG4ambL
+cWBA8mUzjJsN2KjSaSXGqMCVXYpYkj33mcIApyhQf6YqgeNAmNvuC0t4CsDbSshZJkCS1eNisKqlyG
+cF8G2JeiDX6tO6Mv0SmjCa3MFb0bJaGPMU0X7c8XcpvMaOQmCajwSeY9G0WqbBmKv34DsMIEztU6Y2
+KiDlFdt6jnCSqx7Dmt6XnqSKaFFHNO5+FmODxMCWBEaco77lNDGXBM0ECYB/+s7nKFdwSF5hgXumQe
+EZ7amRg39RHy3zIjyRCykQh8Zo2iviRKyTDn/zx6EefptJj2Cw+Ep2FSc01U5ry4KLPYsTyWnVGnvb
+UpyGlhjBUljyjHhWpf8OFaXwhp9O4T1gU9UeyPPa8A2l0p1kNqPXEVRm1AOs1oAGZU596t6SOR2mcB
+Oco1srWtkaVrMUzIErrKri85keKqRQYX9VX0/eAUK1hrSu6HMEX3Qh2sCh0q0D2CtnUqS4hj62sE/z
+aDs2Sg7MBS6xnQeooc2R2tC9YrKpEi9pLXfYXp20tDCpSP8rKlrD4axprb9u1Df5hSbz9QU0cRpfgn
+kiIzwKucd0wsEHlLpe5yHXuc6FrNelOl7pY2+11kTWx7VpRu97dXA3DO1vbkhcb4zyvERYajQgAADs
+="""
+    ),
+    mimetype="image/png",
+)
+
+
+TEMPLATE = """\
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+  "http://www.w3.org/TR/html4/loose.dtd">
+<title>WSGI Information</title>
+<style type="text/css">
+  @import url(https://fonts.googleapis.com/css?family=Ubuntu);
+
+  body       { font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+               'Verdana', sans-serif; background-color: white; color: #000;
+               font-size: 15px; text-align: center; }
+  #logo      { float: right; padding: 0 0 10px 10px; }
+  div.box    { text-align: left; width: 45em; margin: auto; padding: 50px 0;
+               background-color: white; }
+  h1, h2     { font-family: 'Ubuntu', 'Lucida Grande', 'Lucida Sans Unicode',
+               'Geneva', 'Verdana', sans-serif; font-weight: normal; }
+  h1         { margin: 0 0 30px 0; }
+  h2         { font-size: 1.4em; margin: 1em 0 0.5em 0; }
+  table      { width: 100%%; border-collapse: collapse; border: 1px solid #AFC5C9 }
+  table th   { background-color: #AFC1C4; color: white; font-size: 0.72em;
+               font-weight: normal; width: 18em; vertical-align: top;
+               padding: 0.5em 0 0.1em 0.5em; }
+  table td   { border: 1px solid #AFC5C9; padding: 0.1em 0 0.1em 0.5em; }
+  code       { font-family: 'Consolas', 'Monaco', 'Bitstream Vera Sans Mono',
+               monospace; font-size: 0.7em; }
+  ul li      { line-height: 1.5em; }
+  ul.path    { font-size: 0.7em; margin: 0 -30px; padding: 8px 30px;
+               list-style: none; background: #E8EFF0; }
+  ul.path li { line-height: 1.6em; }
+  li.virtual { color: #999; text-decoration: underline; }
+  li.exp     { background: white; }
+</style>
+<div class="box">
+  <img src="?resource=logo" id="logo" alt="[The Werkzeug Logo]" />
+  <h1>WSGI Information</h1>
+  <p>
+    This page displays all available information about the WSGI server and
+    the underlying Python interpreter.
+  <h2 id="python-interpreter">Python Interpreter</h2>
+  <table>
+    <tr>
+      <th>Python Version
+      <td>%(python_version)s
+    <tr>
+      <th>Platform
+      <td>%(platform)s [%(os)s]
+    <tr>
+      <th>API Version
+      <td>%(api_version)s
+    <tr>
+      <th>Byteorder
+      <td>%(byteorder)s
+    <tr>
+      <th>Werkzeug Version
+      <td>%(werkzeug_version)s
+  </table>
+  <h2 id="wsgi-environment">WSGI Environment</h2>
+  <table>%(wsgi_env)s</table>
+  <h2 id="installed-eggs">Installed Eggs</h2>
+  <p>
+    The following Python packages were installed on the system as
+    Python eggs:
+  <ul>%(python_eggs)s</ul>
+  <h2 id="sys-path">System Path</h2>
+  <p>
+    The following paths are the current contents of the load path.  The
+    following entries are looked up for Python packages.  Note that not
+    all items in this path are folders.  Gray and underlined items are
+    entries pointing to invalid resources or used by custom import hooks
+    such as the zip importer.
+  <p>
+    Items with a bright background were expanded for display from a relative
+    path.  If you encounter such paths in the output you might want to check
+    your setup as relative paths are usually problematic in multithreaded
+    environments.
+  <ul class="path">%(sys_path)s</ul>
+</div>
+"""
+
+
+def iter_sys_path() -> t.Iterator[t.Tuple[str, bool, bool]]:
+    if os.name == "posix":
+
+        def strip(x: str) -> str:
+            prefix = os.path.expanduser("~")
+            if x.startswith(prefix):
+                x = f"~{x[len(prefix) :]}"
+            return x
+
+    else:
+
+        def strip(x: str) -> str:
+            return x
+
+    cwd = os.path.abspath(os.getcwd())
+    for item in sys.path:
+        path = os.path.join(cwd, item or os.path.curdir)
+        yield strip(os.path.normpath(path)), not os.path.isdir(path), path != item
+
+
+def render_testapp(req: Request) -> bytes:
+    try:
+        import pkg_resources
+    except ImportError:
+        eggs: t.Iterable[t.Any] = ()
+    else:
+        eggs = sorted(
+            pkg_resources.working_set,
+            key=lambda x: x.project_name.lower(),  # type: ignore
+        )
+    python_eggs = []
+    for egg in eggs:
+        try:
+            version = egg.version
+        except (ValueError, AttributeError):
+            version = "unknown"
+        python_eggs.append(
+            f"<li>{escape(egg.project_name)} <small>[{escape(version)}]</small>"
+        )
+
+    wsgi_env = []
+    sorted_environ = sorted(req.environ.items(), key=lambda x: repr(x[0]).lower())
+    for key, value in sorted_environ:
+        value = "".join(wrap(escape(repr(value))))
+        wsgi_env.append(f"<tr><th>{escape(str(key))}<td><code>{value}</code>")
+
+    sys_path = []
+    for item, virtual, expanded in iter_sys_path():
+        class_ = []
+        if virtual:
+            class_.append("virtual")
+        if expanded:
+            class_.append("exp")
+        class_ = f' class="{" ".join(class_)}"' if class_ else ""
+        sys_path.append(f"<li{class_}>{escape(item)}")
+
+    return (
+        TEMPLATE
+        % {
+            "python_version": "<br>".join(escape(sys.version).splitlines()),
+            "platform": escape(sys.platform),
+            "os": escape(os.name),
+            "api_version": sys.api_version,
+            "byteorder": sys.byteorder,
+            "werkzeug_version": _werkzeug_version,
+            "python_eggs": "\n".join(python_eggs),
+            "wsgi_env": "\n".join(wsgi_env),
+            "sys_path": "\n".join(sys_path),
+        }
+    ).encode("utf-8")
+
+
+def test_app(
+    environ: "WSGIEnvironment", start_response: "StartResponse"
+) -> t.Iterable[bytes]:
+    """Simple test application that dumps the environment.  You can use
+    it to check if Werkzeug is working properly:
+
+    .. sourcecode:: pycon
+
+        >>> from werkzeug.serving import run_simple
+        >>> from werkzeug.testapp import test_app
+        >>> run_simple('localhost', 3000, test_app)
+         * Running on http://localhost:3000/
+
+    The application displays important information from the WSGI environment,
+    the Python interpreter and the installed libraries.
+    """
+    req = Request(environ, populate_request=False)
+    if req.args.get("resource") == "logo":
+        response = logo
+    else:
+        response = Response(render_testapp(req), mimetype="text/html")
+    return response(environ, start_response)
+
+
+if __name__ == "__main__":
+    from .serving import run_simple
+
+    run_simple("localhost", 5000, test_app, use_reloader=True)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/urls.py b/venv/lib/python3.7/site-packages/werkzeug/urls.py
new file mode 100644
index 00000000..7566ac27
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/urls.py
@@ -0,0 +1,1211 @@
+"""Functions for working with URLs.
+
+Contains implementations of functions from :mod:`urllib.parse` that
+handle bytes and strings.
+"""
+import codecs
+import os
+import re
+import typing as t
+import warnings
+
+from ._internal import _check_str_tuple
+from ._internal import _decode_idna
+from ._internal import _encode_idna
+from ._internal import _make_encode_wrapper
+from ._internal import _to_str
+
+if t.TYPE_CHECKING:
+    from . import datastructures as ds
+
+# A regular expression for what a valid scheme looks like
+_scheme_re = re.compile(r"^[a-zA-Z0-9+-.]+$")
+
+# Characters that are safe in any part of a URL.
+_always_safe = frozenset(
+    bytearray(
+        b"abcdefghijklmnopqrstuvwxyz"
+        b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+        b"0123456789"
+        b"-._~"
+    )
+)
+
+_hexdigits = "0123456789ABCDEFabcdef"
+_hextobyte = {
+    f"{a}{b}".encode("ascii"): int(f"{a}{b}", 16)
+    for a in _hexdigits
+    for b in _hexdigits
+}
+_bytetohex = [f"%{char:02X}".encode("ascii") for char in range(256)]
+
+
+class _URLTuple(t.NamedTuple):
+    scheme: str
+    netloc: str
+    path: str
+    query: str
+    fragment: str
+
+
+class BaseURL(_URLTuple):
+    """Superclass of :py:class:`URL` and :py:class:`BytesURL`."""
+
+    __slots__ = ()
+    _at: str
+    _colon: str
+    _lbracket: str
+    _rbracket: str
+
+    def __str__(self) -> str:
+        return self.to_url()
+
+    def replace(self, **kwargs: t.Any) -> "BaseURL":
+        """Return an URL with the same values, except for those parameters
+        given new values by whichever keyword arguments are specified."""
+        return self._replace(**kwargs)
+
+    @property
+    def host(self) -> t.Optional[str]:
+        """The host part of the URL if available, otherwise `None`.  The
+        host is either the hostname or the IP address mentioned in the
+        URL.  It will not contain the port.
+        """
+        return self._split_host()[0]
+
+    @property
+    def ascii_host(self) -> t.Optional[str]:
+        """Works exactly like :attr:`host` but will return a result that
+        is restricted to ASCII.  If it finds a netloc that is not ASCII
+        it will attempt to idna decode it.  This is useful for socket
+        operations when the URL might include internationalized characters.
+        """
+        rv = self.host
+        if rv is not None and isinstance(rv, str):
+            try:
+                rv = _encode_idna(rv)  # type: ignore
+            except UnicodeError:
+                rv = rv.encode("ascii", "ignore")  # type: ignore
+        return _to_str(rv, "ascii", "ignore")
+
+    @property
+    def port(self) -> t.Optional[int]:
+        """The port in the URL as an integer if it was present, `None`
+        otherwise.  This does not fill in default ports.
+        """
+        try:
+            rv = int(_to_str(self._split_host()[1]))
+            if 0 <= rv <= 65535:
+                return rv
+        except (ValueError, TypeError):
+            pass
+        return None
+
+    @property
+    def auth(self) -> t.Optional[str]:
+        """The authentication part in the URL if available, `None`
+        otherwise.
+        """
+        return self._split_netloc()[0]
+
+    @property
+    def username(self) -> t.Optional[str]:
+        """The username if it was part of the URL, `None` otherwise.
+        This undergoes URL decoding and will always be a string.
+        """
+        rv = self._split_auth()[0]
+        if rv is not None:
+            return _url_unquote_legacy(rv)
+        return None
+
+    @property
+    def raw_username(self) -> t.Optional[str]:
+        """The username if it was part of the URL, `None` otherwise.
+        Unlike :attr:`username` this one is not being decoded.
+        """
+        return self._split_auth()[0]
+
+    @property
+    def password(self) -> t.Optional[str]:
+        """The password if it was part of the URL, `None` otherwise.
+        This undergoes URL decoding and will always be a string.
+        """
+        rv = self._split_auth()[1]
+        if rv is not None:
+            return _url_unquote_legacy(rv)
+        return None
+
+    @property
+    def raw_password(self) -> t.Optional[str]:
+        """The password if it was part of the URL, `None` otherwise.
+        Unlike :attr:`password` this one is not being decoded.
+        """
+        return self._split_auth()[1]
+
+    def decode_query(self, *args: t.Any, **kwargs: t.Any) -> "ds.MultiDict[str, str]":
+        """Decodes the query part of the URL.  Ths is a shortcut for
+        calling :func:`url_decode` on the query argument.  The arguments and
+        keyword arguments are forwarded to :func:`url_decode` unchanged.
+        """
+        return url_decode(self.query, *args, **kwargs)
+
+    def join(self, *args: t.Any, **kwargs: t.Any) -> "BaseURL":
+        """Joins this URL with another one.  This is just a convenience
+        function for calling into :meth:`url_join` and then parsing the
+        return value again.
+        """
+        return url_parse(url_join(self, *args, **kwargs))
+
+    def to_url(self) -> str:
+        """Returns a URL string or bytes depending on the type of the
+        information stored.  This is just a convenience function
+        for calling :meth:`url_unparse` for this URL.
+        """
+        return url_unparse(self)
+
+    def encode_netloc(self) -> str:
+        """Encodes the netloc part to an ASCII safe URL as bytes."""
+        rv = self.ascii_host or ""
+        if ":" in rv:
+            rv = f"[{rv}]"
+        port = self.port
+        if port is not None:
+            rv = f"{rv}:{port}"
+        auth = ":".join(
+            filter(
+                None,
+                [
+                    url_quote(self.raw_username or "", "utf-8", "strict", "/:%"),
+                    url_quote(self.raw_password or "", "utf-8", "strict", "/:%"),
+                ],
+            )
+        )
+        if auth:
+            rv = f"{auth}@{rv}"
+        return rv
+
+    def decode_netloc(self) -> str:
+        """Decodes the netloc part into a string."""
+        rv = _decode_idna(self.host or "")
+
+        if ":" in rv:
+            rv = f"[{rv}]"
+        port = self.port
+        if port is not None:
+            rv = f"{rv}:{port}"
+        auth = ":".join(
+            filter(
+                None,
+                [
+                    _url_unquote_legacy(self.raw_username or "", "/:%@"),
+                    _url_unquote_legacy(self.raw_password or "", "/:%@"),
+                ],
+            )
+        )
+        if auth:
+            rv = f"{auth}@{rv}"
+        return rv
+
+    def to_uri_tuple(self) -> "BaseURL":
+        """Returns a :class:`BytesURL` tuple that holds a URI.  This will
+        encode all the information in the URL properly to ASCII using the
+        rules a web browser would follow.
+
+        It's usually more interesting to directly call :meth:`iri_to_uri` which
+        will return a string.
+        """
+        return url_parse(iri_to_uri(self))
+
+    def to_iri_tuple(self) -> "BaseURL":
+        """Returns a :class:`URL` tuple that holds a IRI.  This will try
+        to decode as much information as possible in the URL without
+        losing information similar to how a web browser does it for the
+        URL bar.
+
+        It's usually more interesting to directly call :meth:`uri_to_iri` which
+        will return a string.
+        """
+        return url_parse(uri_to_iri(self))
+
+    def get_file_location(
+        self, pathformat: t.Optional[str] = None
+    ) -> t.Tuple[t.Optional[str], t.Optional[str]]:
+        """Returns a tuple with the location of the file in the form
+        ``(server, location)``.  If the netloc is empty in the URL or
+        points to localhost, it's represented as ``None``.
+
+        The `pathformat` by default is autodetection but needs to be set
+        when working with URLs of a specific system.  The supported values
+        are ``'windows'`` when working with Windows or DOS paths and
+        ``'posix'`` when working with posix paths.
+
+        If the URL does not point to a local file, the server and location
+        are both represented as ``None``.
+
+        :param pathformat: The expected format of the path component.
+                           Currently ``'windows'`` and ``'posix'`` are
+                           supported.  Defaults to ``None`` which is
+                           autodetect.
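+
+        For example, on a posix system:
+
+        >>> url_parse("file:///etc/hosts").get_file_location()
+        (None, '/etc/hosts')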
+        """
+        if self.scheme != "file":
+            return None, None
+
+        path = url_unquote(self.path)
+        host = self.netloc or None
+
+        if pathformat is None:
+            if os.name == "nt":
+                pathformat = "windows"
+            else:
+                pathformat = "posix"
+
+        if pathformat == "windows":
+            if path[:1] == "/" and path[1:2].isalpha() and path[2:3] in "|:":
+                path = f"{path[1:2]}:{path[3:]}"
+            windows_share = path[:3] in ("\\" * 3, "/" * 3)
+            import ntpath
+
+            path = ntpath.normpath(path)
+            # Windows shared drives are represented as ``\\host\\directory``.
+            # That results in a URL like ``file://///host/directory``, and a
+            # path like ``///host/directory``. We need to special-case this
+            # because the path contains the hostname.
+            if windows_share and host is None:
+                parts = path.lstrip("\\").split("\\", 1)
+                if len(parts) == 2:
+                    host, path = parts
+                else:
+                    host = parts[0]
+                    path = ""
+        elif pathformat == "posix":
+            import posixpath
+
+            path = posixpath.normpath(path)
+        else:
+            raise TypeError(f"Invalid path format {pathformat!r}")
+
+        if host in ("127.0.0.1", "::1", "localhost"):
+            host = None
+
+        return host, path
+
+    def _split_netloc(self) -> t.Tuple[t.Optional[str], str]:
+        if self._at in self.netloc:
+            auth, _, netloc = self.netloc.partition(self._at)
+            return auth, netloc
+        return None, self.netloc
+
+    def _split_auth(self) -> t.Tuple[t.Optional[str], t.Optional[str]]:
+        auth = self._split_netloc()[0]
+        if not auth:
+            return None, None
+        if self._colon not in auth:
+            return auth, None
+
+        username, _, password = auth.partition(self._colon)
+        return username, password
+
+    def _split_host(self) -> t.Tuple[t.Optional[str], t.Optional[str]]:
+        rv = self._split_netloc()[1]
+        if not rv:
+            return None, None
+
+        if not rv.startswith(self._lbracket):
+            if self._colon in rv:
+                host, _, port = rv.partition(self._colon)
+                return host, port
+            return rv, None
+
+        idx = rv.find(self._rbracket)
+        if idx < 0:
+            return rv, None
+
+        host = rv[1:idx]
+        rest = rv[idx + 1 :]
+        if rest.startswith(self._colon):
+            return host, rest[1:]
+        return host, None
+
+
+class URL(BaseURL):
+    """Represents a parsed URL.  This behaves like a regular tuple but
+    also has some extra attributes that give further insight into the
+    URL.
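+
+    For example:
+
+    >>> url = url_parse("http://user:pw@example.com:8080/p?q=1#f")
+    >>> url.host, url.port, url.username
+    ('example.com', 8080, 'user')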
+    """
+
+    __slots__ = ()
+    _at = "@"
+    _colon = ":"
+    _lbracket = "["
+    _rbracket = "]"
+
+    def encode(self, charset: str = "utf-8", errors: str = "replace") -> "BytesURL":
+        """Encodes the URL to a tuple made out of bytes.  The charset is
+        only being used for the path, query and fragment.
+        """
+        return BytesURL(
+            self.scheme.encode("ascii"),  # type: ignore
+            self.encode_netloc(),
+            self.path.encode(charset, errors),  # type: ignore
+            self.query.encode(charset, errors),  # type: ignore
+            self.fragment.encode(charset, errors),  # type: ignore
+        )
+
+
+class BytesURL(BaseURL):
+    """Represents a parsed URL in bytes."""
+
+    __slots__ = ()
+    _at = b"@"  # type: ignore
+    _colon = b":"  # type: ignore
+    _lbracket = b"["  # type: ignore
+    _rbracket = b"]"  # type: ignore
+
+    def __str__(self) -> str:
+        return self.to_url().decode("utf-8", "replace")  # type: ignore
+
+    def encode_netloc(self) -> bytes:  # type: ignore
+        """Returns the netloc unchanged as bytes."""
+        return self.netloc  # type: ignore
+
+    def decode(self, charset: str = "utf-8", errors: str = "replace") -> "URL":
+        """Decodes the URL to a tuple made out of strings.  The charset is
+        only being used for the path, query and fragment.
+        """
+        return URL(
+            self.scheme.decode("ascii"),  # type: ignore
+            self.decode_netloc(),
+            self.path.decode(charset, errors),  # type: ignore
+            self.query.decode(charset, errors),  # type: ignore
+            self.fragment.decode(charset, errors),  # type: ignore
+        )
+
+
+_unquote_maps: t.Dict[t.FrozenSet[int], t.Dict[bytes, int]] = {frozenset(): _hextobyte}
+
+
+def _unquote_to_bytes(
+    string: t.Union[str, bytes], unsafe: t.Union[str, bytes] = ""
+) -> bytes:
+    if isinstance(string, str):
+        string = string.encode("utf-8")
+
+    if isinstance(unsafe, str):
+        unsafe = unsafe.encode("utf-8")
+
+    unsafe = frozenset(bytearray(unsafe))
+    groups = iter(string.split(b"%"))
+    result = bytearray(next(groups, b""))
+
+    try:
+        hex_to_byte = _unquote_maps[unsafe]
+    except KeyError:
+        hex_to_byte = _unquote_maps[unsafe] = {
+            h: b for h, b in _hextobyte.items() if b not in unsafe
+        }
+
+    for group in groups:
+        code = group[:2]
+
+        if code in hex_to_byte:
+            result.append(hex_to_byte[code])
+            result.extend(group[2:])
+        else:
+            result.append(37)  # %
+            result.extend(group)
+
+    return bytes(result)
+
+
+def _url_encode_impl(
+    obj: t.Union[t.Mapping[str, str], t.Iterable[t.Tuple[str, str]]],
+    charset: str,
+    sort: bool,
+    key: t.Optional[t.Callable[[t.Tuple[str, str]], t.Any]],
+) -> t.Iterator[str]:
+    from .datastructures import iter_multi_items
+
+    iterable: t.Iterable[t.Tuple[str, str]] = iter_multi_items(obj)
+
+    if sort:
+        iterable = sorted(iterable, key=key)
+
+    for key_str, value_str in iterable:
+        if value_str is None:
+            continue
+
+        if not isinstance(key_str, bytes):
+            key_bytes = str(key_str).encode(charset)
+        else:
+            key_bytes = key_str
+
+        if not isinstance(value_str, bytes):
+            value_bytes = str(value_str).encode(charset)
+        else:
+            value_bytes = value_str
+
+        yield f"{_fast_url_quote_plus(key_bytes)}={_fast_url_quote_plus(value_bytes)}"
+
+
+def _url_unquote_legacy(value: str, unsafe: str = "") -> str:
+    try:
+        return url_unquote(value, charset="utf-8", errors="strict", unsafe=unsafe)
+    except UnicodeError:
+        return url_unquote(value, charset="latin1", unsafe=unsafe)
+
+
+def url_parse(
+    url: str, scheme: t.Optional[str] = None, allow_fragments: bool = True
+) -> BaseURL:
+    """Parses a URL from a string into a :class:`URL` tuple.  If the URL
+    is lacking a scheme it can be provided as second argument. Otherwise,
+    it is ignored.  Optionally fragments can be stripped from the URL
+    by setting `allow_fragments` to `False`.
+
+    The inverse of this function is :func:`url_unparse`.
+
+    :param url: the URL to parse.
+    :param scheme: the default scheme to use if the URL is schemeless.
+    :param allow_fragments: if set to `False` a fragment will be removed
+                            from the URL.
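+
+    For example:
+
+    >>> url_parse("http://example.com/p?q=1#f")
+    URL(scheme='http', netloc='example.com', path='/p', query='q=1', fragment='f')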
+    """
+    s = _make_encode_wrapper(url)
+    is_text_based = isinstance(url, str)
+
+    if scheme is None:
+        scheme = s("")
+    netloc = query = fragment = s("")
+    i = url.find(s(":"))
+    if i > 0 and _scheme_re.match(_to_str(url[:i], errors="replace")):
+        # make sure "iri" is not actually a port number (in which case
+        # "scheme" is really part of the path)
+        rest = url[i + 1 :]
+        if not rest or any(c not in s("0123456789") for c in rest):
+            # not a port number
+            scheme, url = url[:i].lower(), rest
+
+    if url[:2] == s("//"):
+        delim = len(url)
+        for c in s("/?#"):
+            wdelim = url.find(c, 2)
+            if wdelim >= 0:
+                delim = min(delim, wdelim)
+        netloc, url = url[2:delim], url[delim:]
+        if (s("[") in netloc and s("]") not in netloc) or (
+            s("]") in netloc and s("[") not in netloc
+        ):
+            raise ValueError("Invalid IPv6 URL")
+
+    if allow_fragments and s("#") in url:
+        url, fragment = url.split(s("#"), 1)
+    if s("?") in url:
+        url, query = url.split(s("?"), 1)
+
+    result_type = URL if is_text_based else BytesURL
+    return result_type(scheme, netloc, url, query, fragment)
+
+
+def _make_fast_url_quote(
+    charset: str = "utf-8",
+    errors: str = "strict",
+    safe: t.Union[str, bytes] = "/:",
+    unsafe: t.Union[str, bytes] = "",
+) -> t.Callable[[bytes], str]:
+    """Precompile the translation table for a URL encoding function.
+
+    Unlike :func:`url_quote`, the generated function only takes the
+    string to quote.
+
+    :param charset: The charset to encode the result with.
+    :param errors: How to handle encoding errors.
+    :param safe: An optional sequence of safe characters to never encode.
+    :param unsafe: An optional sequence of unsafe characters to always encode.
+    """
+    if isinstance(safe, str):
+        safe = safe.encode(charset, errors)
+
+    if isinstance(unsafe, str):
+        unsafe = unsafe.encode(charset, errors)
+
+    safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe))
+    table = [chr(c) if c in safe else f"%{c:02X}" for c in range(256)]
+
+    def quote(string: bytes) -> str:
+        return "".join([table[c] for c in string])
+
+    return quote
+
+
+_fast_url_quote = _make_fast_url_quote()
+_fast_quote_plus = _make_fast_url_quote(safe=" ", unsafe="+")
+
+
+def _fast_url_quote_plus(string: bytes) -> str:
+    return _fast_quote_plus(string).replace(" ", "+")
+
+
+def url_quote(
+    string: t.Union[str, bytes],
+    charset: str = "utf-8",
+    errors: str = "strict",
+    safe: t.Union[str, bytes] = "/:",
+    unsafe: t.Union[str, bytes] = "",
+) -> str:
+    """URL encode a single string with a given encoding.
+
+    :param string: the string to quote.
+    :param charset: the charset to be used.
+    :param errors: how to handle encoding errors.
+    :param safe: an optional sequence of safe characters.
+    :param unsafe: an optional sequence of unsafe characters.
+
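+    For example:
+
+    >>> url_quote('hello world/x')
+    'hello%20world/x'
+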
+    .. versionadded:: 0.9.2
+       The `unsafe` parameter was added.
+    """
+    if not isinstance(string, (str, bytes, bytearray)):
+        string = str(string)
+    if isinstance(string, str):
+        string = string.encode(charset, errors)
+    if isinstance(safe, str):
+        safe = safe.encode(charset, errors)
+    if isinstance(unsafe, str):
+        unsafe = unsafe.encode(charset, errors)
+    safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe))
+    rv = bytearray()
+    for char in bytearray(string):
+        if char in safe:
+            rv.append(char)
+        else:
+            rv.extend(_bytetohex[char])
+    return bytes(rv).decode(charset)
+
+
+def url_quote_plus(
+    string: str, charset: str = "utf-8", errors: str = "strict", safe: str = ""
+) -> str:
+    """URL encode a single string with the given encoding and convert
+    whitespace to "+".
+
+    :param string: The string to quote.
+    :param charset: The charset to be used.
+    :param safe: An optional sequence of safe characters.
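+
+    For example:
+
+    >>> url_quote_plus('hello world')
+    'hello+world'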
+    """
+    return url_quote(string, charset, errors, safe + " ", "+").replace(" ", "+")
+
+
+def url_unparse(components: t.Tuple[str, str, str, str, str]) -> str:
+    """The reverse operation to :meth:`url_parse`.  This accepts arbitrary
+    as well as :class:`URL` tuples and returns a URL as a string.
+
+    :param components: the parsed URL as tuple which should be converted
+                       into a URL string.
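+
+    For example:
+
+    >>> url_unparse(('http', 'localhost', '/path', 'q=1', 'frag'))
+    'http://localhost/path?q=1#frag'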
+    """
+    _check_str_tuple(components)
+    scheme, netloc, path, query, fragment = components
+    s = _make_encode_wrapper(scheme)
+    url = s("")
+
+    # We generally treat file:///x and file:/x the same which is also
+    # what browsers seem to do.  This also allows us to ignore a schema
+    # register for netloc utilization or having to differentiate between
+    # empty and missing netloc.
+    if netloc or (scheme and path.startswith(s("/"))):
+        if path and path[:1] != s("/"):
+            path = s("/") + path
+        url = s("//") + (netloc or s("")) + path
+    elif path:
+        url += path
+    if scheme:
+        url = scheme + s(":") + url
+    if query:
+        url = url + s("?") + query
+    if fragment:
+        url = url + s("#") + fragment
+    return url
+
+
+def url_unquote(
+    s: t.Union[str, bytes],
+    charset: str = "utf-8",
+    errors: str = "replace",
+    unsafe: str = "",
+) -> str:
+    """URL decode a single string with a given encoding.  If the charset
+    is set to `None` no decoding is performed and raw bytes are
+    returned.
+
+    :param s: the string to unquote.
+    :param charset: the charset of the query string.  If set to `None`
+        no decoding will take place.
+    :param errors: the error handling for the charset decoding.
+    :param unsafe: an optional sequence of characters that should stay quoted.
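+
+    For example:
+
+    >>> url_unquote('hello%20world')
+    'hello world'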
+    """
+    rv = _unquote_to_bytes(s, unsafe)
+    if charset is None:
+        return rv
+    return rv.decode(charset, errors)
+
+
+def url_unquote_plus(
+    s: t.Union[str, bytes], charset: str = "utf-8", errors: str = "replace"
+) -> str:
+    """URL decode a single string with the given `charset` and decode "+" to
+    whitespace.
+
+    By default encoding errors are replaced.  If you want a different
+    behavior you can set `errors` to ``'ignore'`` or ``'strict'``.
+
+    :param s: The string to unquote.
+    :param charset: the charset of the query string.  If set to `None`
+        no decoding will take place.
+    :param errors: The error handling for the `charset` decoding.
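+
+    For example:
+
+    >>> url_unquote_plus('a+b%20c')
+    'a b c'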
+    """
+    if isinstance(s, str):
+        s = s.replace("+", " ")
+    else:
+        s = s.replace(b"+", b" ")
+    return url_unquote(s, charset, errors)
+
+
+def url_fix(s: str, charset: str = "utf-8") -> str:
+    r"""Sometimes you get an URL by a user that just isn't a real URL because
+    it contains unsafe characters like ' ' and so on. This function can fix
+    some of the problems in a similar way browsers handle data entered by the
+    user:
+
+    >>> url_fix('http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)')
+    'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)'
+
+    :param s: the string with the URL to fix.
+    :param charset: The target charset for the URL if the URL was given
+        as a string.
+    """
+    # First step is to switch to text processing and to convert
+    # backslashes (which are invalid in URLs anyways) to slashes.  This is
+    # consistent with what Chrome does.
+    s = _to_str(s, charset, "replace").replace("\\", "/")
+
+    # For the specific case that we look like a malformed windows URL
+    # we want to fix this up manually:
+    if s.startswith("file://") and s[7:8].isalpha() and s[8:10] in (":/", "|/"):
+        s = f"file:///{s[7:]}"
+
+    url = url_parse(s)
+    path = url_quote(url.path, charset, safe="/%+$!*'(),")
+    qs = url_quote_plus(url.query, charset, safe=":&%=+$!*'(),")
+    anchor = url_quote_plus(url.fragment, charset, safe=":&%=+$!*'(),")
+    return url_unparse((url.scheme, url.encode_netloc(), path, qs, anchor))
+
+
+# not-unreserved characters remain quoted when unquoting to IRI
+_to_iri_unsafe = "".join([chr(c) for c in range(128) if c not in _always_safe])
+
+
+def _codec_error_url_quote(e: UnicodeError) -> t.Tuple[str, int]:
+    """Used in :func:`uri_to_iri` after unquoting to re-quote any
+    invalid bytes.
+    """
+    # the docs state that UnicodeError does have these attributes,
+    # but mypy isn't picking them up
+    out = _fast_url_quote(e.object[e.start : e.end])  # type: ignore
+    return out, e.end  # type: ignore
+
+
+codecs.register_error("werkzeug.url_quote", _codec_error_url_quote)
+
+
+def uri_to_iri(
+    uri: t.Union[str, t.Tuple[str, str, str, str, str]],
+    charset: str = "utf-8",
+    errors: str = "werkzeug.url_quote",
+) -> str:
+    """Convert a URI to an IRI. All valid UTF-8 characters are unquoted,
+    leaving all reserved and invalid characters quoted. If the URL has
+    a domain, it is decoded from Punycode.
+
+    >>> uri_to_iri("http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF")
+    'http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF'
+
+    :param uri: The URI to convert.
+    :param charset: The encoding to encode unquoted bytes with.
+    :param errors: Error handler to use during ``bytes.encode``. By
+        default, invalid bytes are left quoted.
+
+    .. versionchanged:: 0.15
+        All reserved and invalid characters remain quoted. Previously,
+        only some reserved characters were preserved, and invalid bytes
+        were replaced instead of left quoted.
+
+    .. versionadded:: 0.6
+    """
+    if isinstance(uri, tuple):
+        uri = url_unparse(uri)
+
+    uri = url_parse(_to_str(uri, charset))
+    path = url_unquote(uri.path, charset, errors, _to_iri_unsafe)
+    query = url_unquote(uri.query, charset, errors, _to_iri_unsafe)
+    fragment = url_unquote(uri.fragment, charset, errors, _to_iri_unsafe)
+    return url_unparse((uri.scheme, uri.decode_netloc(), path, query, fragment))
+
+
+# reserved characters remain unquoted when quoting to URI
+_to_uri_safe = ":/?#[]@!$&'()*+,;=%"
+
+
+def iri_to_uri(
+    iri: t.Union[str, t.Tuple[str, str, str, str, str]],
+    charset: str = "utf-8",
+    errors: str = "strict",
+    safe_conversion: bool = False,
+) -> str:
+    """Convert an IRI to a URI. All non-ASCII and unsafe characters are
+    quoted. If the URL has a domain, it is encoded to Punycode.
+
+    >>> iri_to_uri('http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF')
+    'http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF'
+
+    :param iri: The IRI to convert.
+    :param charset: The encoding of the IRI.
+    :param errors: Error handler to use during ``bytes.encode``.
+    :param safe_conversion: Return the URL unchanged if it only contains
+        ASCII characters and no whitespace. See the explanation below.
+
+    There is a general problem with IRI conversion with some protocols
+    that are in violation of the URI specification. Consider the
+    following two IRIs::
+
+        magnet:?xt=uri:whatever
+        itms-services://?action=download-manifest
+
+    After parsing, we don't know if the scheme requires the ``//``,
+    which is dropped if empty, but conveys different meanings in the
+    final URL if it's present or not. In this case, you can use
+    ``safe_conversion``, which will return the URL unchanged if it only
+    contains ASCII characters and no whitespace. This can result in a
+    URI with unquoted characters if it was not already quoted correctly,
+    but preserves the URL's semantics. Werkzeug uses this for the
+    ``Location`` header for redirects.
+
+    .. versionchanged:: 0.15
+        All reserved characters remain unquoted. Previously, only some
+        reserved characters were left unquoted.
+
+    .. versionchanged:: 0.9.6
+       The ``safe_conversion`` parameter was added.
+
+    .. versionadded:: 0.6
+    """
+    if isinstance(iri, tuple):
+        iri = url_unparse(iri)
+
+    if safe_conversion:
+        # If we're not sure if it's safe to convert the URL, and it only
+        # contains ASCII characters, return it unconverted.
+        try:
+            native_iri = _to_str(iri)
+            ascii_iri = native_iri.encode("ascii")
+
+            # Only return if it doesn't contain whitespace, since
+            # unquoted whitespace is never valid in a URL and means the
+            # value still needs quoting.
+            if len(ascii_iri.split()) == 1:
+                return native_iri
+        except UnicodeError:
+            pass
+
+    iri = url_parse(_to_str(iri, charset, errors))
+    path = url_quote(iri.path, charset, errors, _to_uri_safe)
+    query = url_quote(iri.query, charset, errors, _to_uri_safe)
+    fragment = url_quote(iri.fragment, charset, errors, _to_uri_safe)
+    return url_unparse((iri.scheme, iri.encode_netloc(), path, query, fragment))
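+
+
+# Editor's usage sketch (not part of Werkzeug): with ``safe_conversion``
+# enabled, an all-ASCII IRI whose meaning depends on an empty netloc is
+# returned unchanged instead of being re-parsed and re-quoted:
+#
+#     iri_to_uri("itms-services://?action=download-manifest",
+#                safe_conversion=True)
+#     # -> 'itms-services://?action=download-manifest'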
+
+
+def url_decode(
+    s: t.AnyStr,
+    charset: str = "utf-8",
+    decode_keys: None = None,
+    include_empty: bool = True,
+    errors: str = "replace",
+    separator: str = "&",
+    cls: t.Optional[t.Type["ds.MultiDict"]] = None,
+) -> "ds.MultiDict[str, str]":
+    """Parse a query string and return it as a :class:`MultiDict`.
+
+    :param s: The query string to parse.
+    :param charset: Decode bytes to string with this charset. If not
+        given, bytes are returned as-is.
+    :param include_empty: Include keys with empty values in the dict.
+    :param errors: Error handling behavior when decoding bytes.
+    :param separator: Separator character between pairs.
+    :param cls: Container to hold result instead of :class:`MultiDict`.
+
+    .. versionchanged:: 2.0
+        The ``decode_keys`` parameter is deprecated and will be removed
+        in Werkzeug 2.1.
+
+    .. versionchanged:: 0.5
+        In previous versions ";" and "&" could be used for url decoding.
+        Now only "&" is supported. If you want to use ";", a different
+        ``separator`` can be provided.
+
+    .. versionchanged:: 0.5
+        The ``cls`` parameter was added.
+    """
+    if decode_keys is not None:
+        warnings.warn(
+            "'decode_keys' is deprecated and will be removed in Werkzeug 2.1.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+    if cls is None:
+        from .datastructures import MultiDict  # noqa: F811
+
+        cls = MultiDict
+    if isinstance(s, str) and not isinstance(separator, str):
+        separator = separator.decode(charset or "ascii")
+    elif isinstance(s, bytes) and not isinstance(separator, bytes):
+        separator = separator.encode(charset or "ascii")  # type: ignore
+    return cls(
+        _url_decode_impl(
+            s.split(separator), charset, include_empty, errors  # type: ignore
+        )
+    )
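+
+
+# Editor's usage sketch (not part of Werkzeug): repeated keys are kept,
+# and the result supports MultiDict access:
+#
+#     d = url_decode("a=1&a=2&b=3")
+#     d["a"]          # -> '1'
+#     d.getlist("a")  # -> ['1', '2']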
+
+
+def url_decode_stream(
+    stream: t.BinaryIO,
+    charset: str = "utf-8",
+    decode_keys: None = None,
+    include_empty: bool = True,
+    errors: str = "replace",
+    separator: bytes = b"&",
+    cls: t.Optional[t.Type["ds.MultiDict"]] = None,
+    limit: t.Optional[int] = None,
+    return_iterator: bool = False,
+) -> "ds.MultiDict[str, str]":
+    """Works like :func:`url_decode` but decodes a stream.  The behavior
+    of stream and limit follows functions like
+    :func:`~werkzeug.wsgi.make_line_iter`.  The generator of pairs is
+    directly fed to the `cls` so you can consume the data while it's
+    parsed.
+
+    :param stream: a stream with the encoded querystring
+    :param charset: the charset of the query string.  If set to `None`
+        no decoding will take place.
+    :param include_empty: Set to `False` if you don't want empty values to
+                          appear in the dict.
+    :param errors: the decoding error behavior.
+    :param separator: the pair separator to be used, defaults to ``&``
+    :param cls: an optional dict class to use.  If this is not specified
+                       or `None` the default :class:`MultiDict` is used.
+    :param limit: the content length of the URL data.  Not necessary if
+                  a limited stream is provided.
+
+    .. versionchanged:: 2.0
+        The ``decode_keys`` and ``return_iterator`` parameters are
+        deprecated and will be removed in Werkzeug 2.1.
+
+    .. versionadded:: 0.8
+    """
+    from .wsgi import make_chunk_iter
+
+    if decode_keys is not None:
+        warnings.warn(
+            "'decode_keys' is deprecated and will be removed in Werkzeug 2.1.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+    pair_iter = make_chunk_iter(stream, separator, limit)
+    decoder = _url_decode_impl(pair_iter, charset, include_empty, errors)
+
+    if return_iterator:
+        warnings.warn(
+            "'return_iterator' is deprecated and will be removed in Werkzeug 2.1.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return decoder  # type: ignore
+
+    if cls is None:
+        from .datastructures import MultiDict  # noqa: F811
+
+        cls = MultiDict
+
+    return cls(decoder)
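+
+
+# Editor's usage sketch (not part of Werkzeug): decoding from a binary
+# stream; ``limit`` bounds how much is read, per the docstring above.
+#
+#     from io import BytesIO
+#     data = b"a=1&b=2"
+#     url_decode_stream(BytesIO(data), limit=len(data))
+#     # -> MultiDict([('a', '1'), ('b', '2')])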
+
+
+def _url_decode_impl(
+    pair_iter: t.Iterable[t.AnyStr], charset: str, include_empty: bool, errors: str
+) -> t.Iterator[t.Tuple[str, str]]:
+    for pair in pair_iter:
+        if not pair:
+            continue
+        s = _make_encode_wrapper(pair)
+        equal = s("=")
+        if equal in pair:
+            key, value = pair.split(equal, 1)
+        else:
+            if not include_empty:
+                continue
+            key = pair
+            value = s("")
+        yield (
+            url_unquote_plus(key, charset, errors),
+            url_unquote_plus(value, charset, errors),
+        )
+
+
+def url_encode(
+    obj: t.Union[t.Mapping[str, str], t.Iterable[t.Tuple[str, str]]],
+    charset: str = "utf-8",
+    encode_keys: None = None,
+    sort: bool = False,
+    key: t.Optional[t.Callable[[t.Tuple[str, str]], t.Any]] = None,
+    separator: str = "&",
+) -> str:
+    """URL encode a dict/`MultiDict`.  If a value is `None` it will not appear
+    in the result string.  By default only the values are encoded into the
+    target charset.
+
+    :param obj: the object to encode into a query string.
+    :param charset: the charset of the query string.
+    :param sort: set to `True` if you want parameters to be sorted by `key`.
+    :param separator: the separator to be used for the pairs.
+    :param key: an optional function to be used for sorting.  For more details
+                check out the :func:`sorted` documentation.
+
+    .. versionchanged:: 2.0
+        The ``encode_keys`` parameter is deprecated and will be removed
+        in Werkzeug 2.1.
+
+    .. versionchanged:: 0.5
+        Added the ``sort``, ``key``, and ``separator`` parameters.
+    """
+    if encode_keys is not None:
+        warnings.warn(
+            "'encode_keys' is deprecated and will be removed in Werkzeug 2.1.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+    separator = _to_str(separator, "ascii")
+    return separator.join(_url_encode_impl(obj, charset, sort, key))
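+
+
+# Editor's usage sketch (not part of Werkzeug): ``None`` values are
+# skipped and non-ASCII values are percent-encoded in the given charset;
+# ``sort=True`` orders the pairs:
+#
+#     url_encode({"name": "müller", "skip": None})  # -> 'name=m%C3%BCller'
+#     url_encode({"b": 2, "a": 1}, sort=True)       # -> 'a=1&b=2'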
+
+
+def url_encode_stream(
+    obj: t.Union[t.Mapping[str, str], t.Iterable[t.Tuple[str, str]]],
+    stream: t.Optional[t.TextIO] = None,
+    charset: str = "utf-8",
+    encode_keys: None = None,
+    sort: bool = False,
+    key: t.Optional[t.Callable[[t.Tuple[str, str]], t.Any]] = None,
+    separator: str = "&",
+) -> None:
+    """Like :meth:`url_encode` but writes the results to a stream
+    object.  If the stream is `None` a generator over all encoded
+    pairs is returned.
+
+    :param obj: the object to encode into a query string.
+    :param stream: a stream to write the encoded object into or `None` if
+                   an iterator over the encoded pairs should be returned.  In
+                   that case the separator argument is ignored.
+    :param charset: the charset of the query string.
+    :param sort: set to `True` if you want parameters to be sorted by `key`.
+    :param separator: the separator to be used for the pairs.
+    :param key: an optional function to be used for sorting.  For more details
+                check out the :func:`sorted` documentation.
+
+    .. versionchanged:: 2.0
+        The ``encode_keys`` parameter is deprecated and will be removed
+        in Werkzeug 2.1.
+
+    .. versionadded:: 0.8
+    """
+    if encode_keys is not None:
+        warnings.warn(
+            "'encode_keys' is deprecated and will be removed in Werkzeug 2.1.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+    separator = _to_str(separator, "ascii")
+    gen = _url_encode_impl(obj, charset, sort, key)
+    if stream is None:
+        return gen  # type: ignore
+    for idx, chunk in enumerate(gen):
+        if idx:
+            stream.write(separator)
+        stream.write(chunk)
+    return None
+
+
+def url_join(
+    base: t.Union[str, t.Tuple[str, str, str, str, str]],
+    url: t.Union[str, t.Tuple[str, str, str, str, str]],
+    allow_fragments: bool = True,
+) -> str:
+    """Join a base URL and a possibly relative URL to form an absolute
+    interpretation of the latter.
+
+    :param base: the base URL for the join operation.
+    :param url: the URL to join.
+    :param allow_fragments: indicates whether fragments should be allowed.
+    """
+    if isinstance(base, tuple):
+        base = url_unparse(base)
+    if isinstance(url, tuple):
+        url = url_unparse(url)
+
+    _check_str_tuple((base, url))
+    s = _make_encode_wrapper(base)
+
+    if not base:
+        return url
+    if not url:
+        return base
+
+    bscheme, bnetloc, bpath, bquery, bfragment = url_parse(
+        base, allow_fragments=allow_fragments
+    )
+    scheme, netloc, path, query, fragment = url_parse(url, bscheme, allow_fragments)
+    if scheme != bscheme:
+        return url
+    if netloc:
+        return url_unparse((scheme, netloc, path, query, fragment))
+    netloc = bnetloc
+
+    if path[:1] == s("/"):
+        segments = path.split(s("/"))
+    elif not path:
+        segments = bpath.split(s("/"))
+        if not query:
+            query = bquery
+    else:
+        segments = bpath.split(s("/"))[:-1] + path.split(s("/"))
+
+    # If the rightmost part is "./" we want to keep the slash but
+    # remove the dot.
+    if segments[-1] == s("."):
+        segments[-1] = s("")
+
+    # Resolve ".." and "."
+    segments = [segment for segment in segments if segment != s(".")]
+    while True:
+        i = 1
+        n = len(segments) - 1
+        while i < n:
+            if segments[i] == s("..") and segments[i - 1] not in (s(""), s("..")):
+                del segments[i - 1 : i + 1]
+                break
+            i += 1
+        else:
+            break
+
+    # Remove trailing ".." if the URL is absolute
+    unwanted_marker = [s(""), s("..")]
+    while segments[:2] == unwanted_marker:
+        del segments[1]
+
+    path = s("/").join(segments)
+    return url_unparse((scheme, netloc, path, query, fragment))
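+
+
+# Editor's usage sketch (not part of Werkzeug): relative references are
+# resolved against the base, including ".." and "." segments:
+#
+#     url_join("http://example.com/a/b", "../c")  # -> 'http://example.com/c'
+#     url_join("http://example.com/a/", "b")      # -> 'http://example.com/a/b'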
+
+
+class Href:
+    """Implements a callable that constructs URLs with the given base. The
+    function can be called with any number of positional and keyword
+    arguments which are then used to assemble the URL.  Works with URLs
+    and posix paths.
+
+    Positional arguments are appended as individual segments to
+    the path of the URL:
+
+    >>> href = Href('/foo')
+    >>> href('bar', 23)
+    '/foo/bar/23'
+    >>> href('foo', bar=23)
+    '/foo/foo?bar=23'
+
+    If any of the arguments (positional or keyword) evaluates to `None` it
+    will be skipped.  If no keyword arguments are given the last argument
+    can be a :class:`dict` or :class:`MultiDict` (or any other dict subclass),
+    otherwise the keyword arguments are used for the query parameters, with a
+    single trailing underscore stripped from each parameter name:
+
+    >>> href(is_=42)
+    '/foo?is=42'
+    >>> href({'foo': 'bar'})
+    '/foo?foo=bar'
+
+    Combining both methods is not allowed:
+
+    >>> href({'foo': 'bar'}, bar=42)
+    Traceback (most recent call last):
+      ...
+    TypeError: keyword arguments and query-dicts can't be combined
+
+    Accessing attributes on the href object creates a new href object with
+    the attribute name as prefix:
+
+    >>> bar_href = href.bar
+    >>> bar_href("blub")
+    '/foo/bar/blub'
+
+    If `sort` is set to `True` the items are sorted by `key` or the default
+    sorting algorithm:
+
+    >>> href = Href("/", sort=True)
+    >>> href(a=1, b=2, c=3)
+    '/?a=1&b=2&c=3'
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use :mod:`werkzeug.routing`
+        instead.
+
+    .. versionadded:: 0.5
+        `sort` and `key` were added.
+    """
+
+    def __init__(  # type: ignore
+        self, base="./", charset="utf-8", sort=False, key=None
+    ):
+        warnings.warn(
+            "'Href' is deprecated and will be removed in Werkzeug 2.1."
+            " Use 'werkzeug.routing' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        if not base:
+            base = "./"
+        self.base = base
+        self.charset = charset
+        self.sort = sort
+        self.key = key
+
+    def __getattr__(self, name):  # type: ignore
+        if name[:2] == "__":
+            raise AttributeError(name)
+        base = self.base
+        if base[-1:] != "/":
+            base += "/"
+        return Href(url_join(base, name), self.charset, self.sort, self.key)
+
+    def __call__(self, *path, **query):  # type: ignore
+        if path and isinstance(path[-1], dict):
+            if query:
+                raise TypeError("keyword arguments and query-dicts can't be combined")
+            query, path = path[-1], path[:-1]
+        elif query:
+            query = {k[:-1] if k.endswith("_") else k: v for k, v in query.items()}
+        path = "/".join(
+            [
+                _to_str(url_quote(x, self.charset), "ascii")
+                for x in path
+                if x is not None
+            ]
+        ).lstrip("/")
+        rv = self.base
+        if path:
+            if not rv.endswith("/"):
+                rv += "/"
+            rv = url_join(rv, f"./{path}")
+        if query:
+            rv += "?" + _to_str(
+                url_encode(query, self.charset, sort=self.sort, key=self.key), "ascii"
+            )
+        return rv
diff --git a/venv/lib/python3.7/site-packages/werkzeug/user_agent.py b/venv/lib/python3.7/site-packages/werkzeug/user_agent.py
new file mode 100644
index 00000000..66ffcbe0
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/user_agent.py
@@ -0,0 +1,47 @@
+import typing as t
+
+
+class UserAgent:
+    """Represents a parsed user agent header value.
+
+    The default implementation does no parsing, only the :attr:`string`
+    attribute is set. A subclass may parse the string to set the
+    common attributes or expose other information. Set
+    :attr:`werkzeug.wrappers.Request.user_agent_class` to use a
+    subclass.
+
+    :param string: The header value to parse.
+
+    .. versionadded:: 2.0
+        This replaces the previous ``useragents`` module, but does not
+        provide a built-in parser.
+    """
+
+    platform: t.Optional[str] = None
+    """The OS name, if it could be parsed from the string."""
+
+    browser: t.Optional[str] = None
+    """The browser name, if it could be parsed from the string."""
+
+    version: t.Optional[str] = None
+    """The browser version, if it could be parsed from the string."""
+
+    language: t.Optional[str] = None
+    """The browser language, if it could be parsed from the string."""
+
+    def __init__(self, string: str) -> None:
+        self.string: str = string
+        """The original header value."""
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self.browser}/{self.version}>"
+
+    def __str__(self) -> str:
+        return self.string
+
+    def __bool__(self) -> bool:
+        return bool(self.browser)
+
+    def to_header(self) -> str:
+        """Convert to a header value."""
+        return self.string
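+
+
+# Editor's sketch (not part of Werkzeug): a minimal subclass that fills
+# the common attributes with a deliberately naive parse.  The attribute
+# handling follows the base class above; the parsing itself is only an
+# illustration.
+#
+#     class NaiveUserAgent(UserAgent):
+#         def __init__(self, string: str) -> None:
+#             super().__init__(string)
+#             if "Firefox/" in string:
+#                 self.browser = "firefox"
+#                 self.version = string.rsplit("Firefox/", 1)[-1].split()[0]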
diff --git a/venv/lib/python3.7/site-packages/werkzeug/useragents.py b/venv/lib/python3.7/site-packages/werkzeug/useragents.py
new file mode 100644
index 00000000..4deed8f4
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/useragents.py
@@ -0,0 +1,215 @@
+import re
+import typing as t
+import warnings
+
+from .user_agent import UserAgent as _BaseUserAgent
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+class _UserAgentParser:
+    platform_rules: t.ClassVar[t.Iterable[t.Tuple[str, str]]] = (
+        (" cros ", "chromeos"),
+        ("iphone|ios", "iphone"),
+        ("ipad", "ipad"),
+        (r"darwin\b|mac\b|os\s*x", "macos"),
+        ("win", "windows"),
+        (r"android", "android"),
+        ("netbsd", "netbsd"),
+        ("openbsd", "openbsd"),
+        ("freebsd", "freebsd"),
+        ("dragonfly", "dragonflybsd"),
+        ("(sun|i86)os", "solaris"),
+        (r"x11\b|lin(\b|ux)?", "linux"),
+        (r"nintendo\s+wii", "wii"),
+        ("irix", "irix"),
+        ("hp-?ux", "hpux"),
+        ("aix", "aix"),
+        ("sco|unix_sv", "sco"),
+        ("bsd", "bsd"),
+        ("amiga", "amiga"),
+        ("blackberry|playbook", "blackberry"),
+        ("symbian", "symbian"),
+    )
+    browser_rules: t.ClassVar[t.Iterable[t.Tuple[str, str]]] = (
+        ("googlebot", "google"),
+        ("msnbot", "msn"),
+        ("yahoo", "yahoo"),
+        ("ask jeeves", "ask"),
+        (r"aol|america\s+online\s+browser", "aol"),
+        (r"opera|opr", "opera"),
+        ("edge|edg", "edge"),
+        ("chrome|crios", "chrome"),
+        ("seamonkey", "seamonkey"),
+        ("firefox|firebird|phoenix|iceweasel", "firefox"),
+        ("galeon", "galeon"),
+        ("safari|version", "safari"),
+        ("webkit", "webkit"),
+        ("camino", "camino"),
+        ("konqueror", "konqueror"),
+        ("k-meleon", "kmeleon"),
+        ("netscape", "netscape"),
+        (r"msie|microsoft\s+internet\s+explorer|trident/.+? rv:", "msie"),
+        ("lynx", "lynx"),
+        ("links", "links"),
+        ("Baiduspider", "baidu"),
+        ("bingbot", "bing"),
+        ("mozilla", "mozilla"),
+    )
+
+    _browser_version_re = r"(?:{pattern})[/\sa-z(]*(\d+[.\da-z]+)?"
+    _language_re = re.compile(
+        r"(?:;\s*|\s+)(\b\w{2}\b(?:-\b\w{2}\b)?)\s*;|"
+        r"(?:\(|\[|;)\s*(\b\w{2}\b(?:-\b\w{2}\b)?)\s*(?:\]|\)|;)"
+    )
+
+    def __init__(self) -> None:
+        self.platforms = [(b, re.compile(a, re.I)) for a, b in self.platform_rules]
+        self.browsers = [
+            (b, re.compile(self._browser_version_re.format(pattern=a), re.I))
+            for a, b in self.browser_rules
+        ]
+
+    def __call__(
+        self, user_agent: str
+    ) -> t.Tuple[t.Optional[str], t.Optional[str], t.Optional[str], t.Optional[str]]:
+        platform: t.Optional[str]
+        browser: t.Optional[str]
+        version: t.Optional[str]
+        language: t.Optional[str]
+
+        for platform, regex in self.platforms:  # noqa: B007
+            match = regex.search(user_agent)
+            if match is not None:
+                break
+        else:
+            platform = None
+
+        # Except for Trident, all browser key words come after the last ')'
+        last_closing_paren = 0
+        if (
+            not re.compile(r"trident/.+? rv:", re.I).search(user_agent)
+            and ")" in user_agent
+            and user_agent[-1] != ")"
+        ):
+            last_closing_paren = user_agent.rindex(")")
+
+        for browser, regex in self.browsers:  # noqa: B007
+            match = regex.search(user_agent[last_closing_paren:])
+            if match is not None:
+                version = match.group(1)
+                break
+        else:
+            browser = version = None
+        match = self._language_re.search(user_agent)
+        if match is not None:
+            language = match.group(1) or match.group(2)
+        else:
+            language = None
+        return platform, browser, version, language
+
+
+# It wasn't public, but users might have imported it anyway; show a
+# warning if a user creates an instance.
+class UserAgentParser(_UserAgentParser):
+    """A simple user agent parser.  Used by the `UserAgent`.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use a dedicated parser library
+        instead.
+    """
+
+    def __init__(self) -> None:
+        warnings.warn(
+            "'UserAgentParser' is deprecated and will be removed in"
+            " Werkzeug 2.1. Use a dedicated parser library instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__()
+
+
+class _deprecated_property(property):
+    def __init__(self, fget: t.Callable[["_UserAgent"], t.Any]) -> None:
+        super().__init__(fget)
+        self.message = (
+            "The built-in user agent parser is deprecated and will be"
+            f" removed in Werkzeug 2.1. The {fget.__name__!r} property"
+            " will be 'None'. Subclass 'werkzeug.user_agent.UserAgent'"
+            " and set 'Request.user_agent_class' to use a different"
+            " parser."
+        )
+
+    def __get__(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
+        warnings.warn(self.message, DeprecationWarning, stacklevel=3)
+        return super().__get__(*args, **kwargs)
+
+
+# This is what Request.user_agent returns for now, only show warnings on
+# attribute access, not creation.
+class _UserAgent(_BaseUserAgent):
+    _parser = _UserAgentParser()
+
+    def __init__(self, string: str) -> None:
+        super().__init__(string)
+        info = self._parser(string)
+        self._platform, self._browser, self._version, self._language = info
+
+    @_deprecated_property
+    def platform(self) -> t.Optional[str]:  # type: ignore
+        return self._platform
+
+    @_deprecated_property
+    def browser(self) -> t.Optional[str]:  # type: ignore
+        return self._browser
+
+    @_deprecated_property
+    def version(self) -> t.Optional[str]:  # type: ignore
+        return self._version
+
+    @_deprecated_property
+    def language(self) -> t.Optional[str]:  # type: ignore
+        return self._language
+
+
+# This is what users might be importing, show warnings on create.
+class UserAgent(_UserAgent):
+    """Represents a parsed user agent header value.
+
+    This uses a basic parser to try to extract some information from the
+    header.
+
+    :param environ_or_string: The header value to parse, or a WSGI
+        environ containing the header.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Subclass
+        :class:`werkzeug.user_agent.UserAgent` (note the new module
+        name) to use a dedicated parser instead.
+
+    .. versionchanged:: 2.0
+        Passing a WSGI environ is deprecated and will be removed in 2.1.
+    """
+
+    def __init__(self, environ_or_string: "t.Union[str, WSGIEnvironment]") -> None:
+        if isinstance(environ_or_string, dict):
+            warnings.warn(
+                "Passing an environ to 'UserAgent' is deprecated and"
+                " will be removed in Werkzeug 2.1. Pass the header"
+                " value string instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            string = environ_or_string.get("HTTP_USER_AGENT", "")
+        else:
+            string = environ_or_string
+
+        warnings.warn(
+            "The 'werkzeug.useragents' module is deprecated and will be"
+            " removed in Werkzeug 2.1. The new base API is"
+            " 'werkzeug.user_agent.UserAgent'.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(string)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/utils.py b/venv/lib/python3.7/site-packages/werkzeug/utils.py
new file mode 100644
index 00000000..7bb02bbc
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/utils.py
@@ -0,0 +1,1091 @@
+import codecs
+import io
+import mimetypes
+import os
+import pkgutil
+import re
+import sys
+import typing as t
+import unicodedata
+import warnings
+from datetime import datetime
+from html.entities import name2codepoint
+from time import time
+from zlib import adler32
+
+from ._internal import _DictAccessorProperty
+from ._internal import _missing
+from ._internal import _parse_signature
+from ._internal import _TAccessorValue
+from .datastructures import Headers
+from .exceptions import NotFound
+from .exceptions import RequestedRangeNotSatisfiable
+from .security import safe_join
+from .urls import url_quote
+from .wsgi import wrap_file
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import WSGIEnvironment
+    from .wrappers.request import Request
+    from .wrappers.response import Response
+
+_T = t.TypeVar("_T")
+
+_entity_re = re.compile(r"&([^;]+);")
+_filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]")
+_windows_device_files = (
+    "CON",
+    "AUX",
+    "COM1",
+    "COM2",
+    "COM3",
+    "COM4",
+    "LPT1",
+    "LPT2",
+    "LPT3",
+    "PRN",
+    "NUL",
+)
+
+
+class cached_property(property, t.Generic[_T]):
+    """A :func:`property` that is only evaluated once. Subsequent access
+    returns the cached value. Setting the property sets the cached
+    value. Deleting the property clears the cached value, accessing it
+    again will evaluate it again.
+
+    .. code-block:: python
+
+        class Example:
+            @cached_property
+            def value(self):
+                # calculate something important here
+                return 42
+
+        e = Example()
+        e.value  # evaluates
+        e.value  # uses cache
+        e.value = 16  # sets cache
+        del e.value  # clears cache
+
+    The class must have a ``__dict__`` for this to work.
+
+    .. versionchanged:: 2.0
+        ``del obj.name`` clears the cached value.
+    """
+
+    def __init__(
+        self,
+        fget: t.Callable[[t.Any], _T],
+        name: t.Optional[str] = None,
+        doc: t.Optional[str] = None,
+    ) -> None:
+        super().__init__(fget, doc=doc)
+        self.__name__ = name or fget.__name__
+        self.__module__ = fget.__module__
+
+    def __set__(self, obj: object, value: _T) -> None:
+        obj.__dict__[self.__name__] = value
+
+    def __get__(self, obj: object, type: type = None) -> _T:  # type: ignore
+        if obj is None:
+            return self  # type: ignore
+
+        value: _T = obj.__dict__.get(self.__name__, _missing)
+
+        if value is _missing:
+            value = self.fget(obj)  # type: ignore
+            obj.__dict__[self.__name__] = value
+
+        return value
+
+    def __delete__(self, obj: object) -> None:
+        del obj.__dict__[self.__name__]
+
+
+def invalidate_cached_property(obj: object, name: str) -> None:
+    """Invalidates the cache for a :class:`cached_property`:
+
+    >>> class Test(object):
+    ...     @cached_property
+    ...     def magic_number(self):
+    ...         print("recalculating...")
+    ...         return 42
+    ...
+    >>> var = Test()
+    >>> var.magic_number
+    recalculating...
+    42
+    >>> var.magic_number
+    42
+    >>> invalidate_cached_property(var, "magic_number")
+    >>> var.magic_number
+    recalculating...
+    42
+
+    You must pass the name of the cached property as the second argument.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use ``del obj.name`` instead.
+    """
+    warnings.warn(
+        "'invalidate_cached_property' is deprecated and will be removed"
+        " in Werkzeug 2.1. Use 'del obj.name' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    delattr(obj, name)
+
+
+class environ_property(_DictAccessorProperty[_TAccessorValue]):
+    """Maps request attributes to environment variables. This works not only
+    for the Werkzeug request object, but also for any other class with an
+    environ attribute:
+
+    >>> class Test(object):
+    ...     environ = {'key': 'value'}
+    ...     test = environ_property('key')
+    >>> var = Test()
+    >>> var.test
+    'value'
+
+    If you pass it a second value it's used as the default if the key does
+    not exist.  The third one can be a converter that takes a value and
+    converts it; if it raises :exc:`ValueError` or :exc:`TypeError` the
+    default value is used.  If no default value is provided `None` is used.
+
+    By default the property is read only.  You have to explicitly enable
+    write access by passing ``read_only=False`` to the constructor.
+    """
+
+    read_only = True
+
+    def lookup(self, obj: "Request") -> "WSGIEnvironment":
+        return obj.environ
+
+
+class header_property(_DictAccessorProperty[_TAccessorValue]):
+    """Like `environ_property` but for headers."""
+
+    def lookup(self, obj: t.Union["Request", "Response"]) -> Headers:
+        return obj.headers
+
+
+class HTMLBuilder:
+    """Helper object for HTML generation.
+
+    By default there are two instances of this class: the `html` one, and
+    the `xhtml` one for those two dialects.  The class uses keyword parameters
+    and positional parameters to generate small snippets of HTML.
+
+    Keyword parameters are converted to XML/SGML attributes; positional
+    arguments are used as children.  Because Python accepts positional
+    arguments before keyword arguments it's a good idea to use a list with the
+    star-syntax for some children:
+
+    >>> html.p(class_='foo', *[html.a('foo', href='foo.html'), ' ',
+    ...                        html.a('bar', href='bar.html')])
+    '<p class="foo"><a href="foo.html">foo</a> <a href="bar.html">bar</a></p>'
+
+    This class works around some browser limitations and cannot be used for
+    arbitrary SGML/XML generation.  For that purpose lxml and similar
+    libraries exist.
+
+    Calling the builder escapes the string passed:
+
+    >>> html.p(html("<foo>"))
+    '<p>&lt;foo&gt;</p>'
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1.
+    """
+
+    _entity_re = re.compile(r"&([^;]+);")
+    _entities = name2codepoint.copy()
+    _entities["apos"] = 39
+    _empty_elements = {
+        "area",
+        "base",
+        "basefont",
+        "br",
+        "col",
+        "command",
+        "embed",
+        "frame",
+        "hr",
+        "img",
+        "input",
+        "keygen",
+        "isindex",
+        "link",
+        "meta",
+        "param",
+        "source",
+        "wbr",
+    }
+    _boolean_attributes = {
+        "selected",
+        "checked",
+        "compact",
+        "declare",
+        "defer",
+        "disabled",
+        "ismap",
+        "multiple",
+        "nohref",
+        "noresize",
+        "noshade",
+        "nowrap",
+    }
+    _plaintext_elements = {"textarea"}
+    _c_like_cdata = {"script", "style"}
+
+    def __init__(self, dialect):  # type: ignore
+        self._dialect = dialect
+
+    def __call__(self, s):  # type: ignore
+        import html
+
+        warnings.warn(
+            "'utils.HTMLBuilder' is deprecated and will be removed in Werkzeug 2.1.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return html.escape(s)
+
+    def __getattr__(self, tag):  # type: ignore
+        import html
+
+        warnings.warn(
+            "'utils.HTMLBuilder' is deprecated and will be removed in Werkzeug 2.1.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        if tag[:2] == "__":
+            raise AttributeError(tag)
+
+        def proxy(*children, **arguments):  # type: ignore
+            buffer = f"<{tag}"
+            for key, value in arguments.items():
+                if value is None:
+                    continue
+                if key[-1] == "_":
+                    key = key[:-1]
+                if key in self._boolean_attributes:
+                    if not value:
+                        continue
+                    if self._dialect == "xhtml":
+                        value = f'="{key}"'
+                    else:
+                        value = ""
+                else:
+                    value = f'="{html.escape(value)}"'
+                buffer += f" {key}{value}"
+            if not children and tag in self._empty_elements:
+                if self._dialect == "xhtml":
+                    buffer += " />"
+                else:
+                    buffer += ">"
+                return buffer
+            buffer += ">"
+
+            children_as_string = "".join([str(x) for x in children if x is not None])
+
+            if children_as_string:
+                if tag in self._plaintext_elements:
+                    children_as_string = html.escape(children_as_string)
+                elif tag in self._c_like_cdata and self._dialect == "xhtml":
+                    children_as_string = f"/*<![CDATA[*/{children_as_string}/*]]>*/"
+            buffer += children_as_string + f"</{tag}>"
+            return buffer
+
+        return proxy
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} for {self._dialect!r}>"
+
+
+html = HTMLBuilder("html")
+xhtml = HTMLBuilder("xhtml")
+
+# https://cgit.freedesktop.org/xdg/shared-mime-info/tree/freedesktop.org.xml.in
+# https://www.iana.org/assignments/media-types/media-types.xhtml
+# Types listed in the XDG mime info that have a charset in the IANA registration.
+_charset_mimetypes = {
+    "application/ecmascript",
+    "application/javascript",
+    "application/sql",
+    "application/xml",
+    "application/xml-dtd",
+    "application/xml-external-parsed-entity",
+}
+
+
+def get_content_type(mimetype: str, charset: str) -> str:
+    """Returns the full content type string with charset for a mimetype.
+
+    If the mimetype represents text, the charset parameter will be
+    appended, otherwise the mimetype is returned unchanged.
+
+    :param mimetype: The mimetype to be used as content type.
+    :param charset: The charset to be appended for text mimetypes.
+    :return: The content type.
+
+    .. versionchanged:: 0.15
+        Any type that ends with ``+xml`` gets a charset, not just those
+        that start with ``application/``. Known text types such as
+        ``application/javascript`` are also given charsets.
+    """
+    if (
+        mimetype.startswith("text/")
+        or mimetype in _charset_mimetypes
+        or mimetype.endswith("+xml")
+    ):
+        mimetype += f"; charset={charset}"
+
+    return mimetype
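+
+
+# Editor's usage sketch (not part of Werkzeug): text-like types get the
+# charset appended, binary types pass through unchanged:
+#
+#     get_content_type("text/html", "utf-8")  # -> 'text/html; charset=utf-8'
+#     get_content_type("image/png", "utf-8")  # -> 'image/png'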
+
+
+def detect_utf_encoding(data: bytes) -> str:
+    """Detect which UTF encoding was used to encode the given bytes.
+
+    The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is
+    accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big
+    or little endian. Some editors or libraries may prepend a BOM.
+
+    :internal:
+
+    :param data: Bytes in unknown UTF encoding.
+    :return: UTF encoding name
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. This is built in to
+        :func:`json.loads`.
+
+    .. versionadded:: 0.15
+    """
+    warnings.warn(
+        "'detect_utf_encoding' is deprecated and will be removed in"
+        " Werkzeug 2.1. This is built in to 'json.loads'.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    head = data[:4]
+
+    if head[:3] == codecs.BOM_UTF8:
+        return "utf-8-sig"
+
+    if b"\x00" not in head:
+        return "utf-8"
+
+    if head in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE):
+        return "utf-32"
+
+    if head[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE):
+        return "utf-16"
+
+    if len(head) == 4:
+        if head[:3] == b"\x00\x00\x00":
+            return "utf-32-be"
+
+        if head[::2] == b"\x00\x00":
+            return "utf-16-be"
+
+        if head[1:] == b"\x00\x00\x00":
+            return "utf-32-le"
+
+        if head[1::2] == b"\x00\x00":
+            return "utf-16-le"
+
+    if len(head) == 2:
+        return "utf-16-be" if head.startswith(b"\x00") else "utf-16-le"
+
+    return "utf-8"
+
+
+def format_string(string: str, context: t.Mapping[str, t.Any]) -> str:
+    """String-template format a string:
+
+    >>> format_string('$foo and ${foo}s', dict(foo=42))
+    '42 and 42s'
+
+    This does not do any attribute lookup.
+
+    :param string: the format string.
+    :param context: a dict with the variables to insert.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use :class:`string.Template`
+        instead.
+    """
+    from string import Template
+
+    warnings.warn(
+        "'utils.format_string' is deprecated and will be removed in"
+        " Werkzeug 2.1. Use 'string.Template' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return Template(string).substitute(context)
+
+
+def secure_filename(filename: str) -> str:
+    r"""Pass it a filename and it will return a secure version of it.  This
+    filename can then safely be stored on a regular file system and passed
+    to :func:`os.path.join`.  The filename returned is an ASCII only string
+    for maximum portability.
+
+    On Windows systems the function also makes sure that the file is not
+    named after one of the special device files.
+
+    >>> secure_filename("My cool movie.mov")
+    'My_cool_movie.mov'
+    >>> secure_filename("../../../etc/passwd")
+    'etc_passwd'
+    >>> secure_filename('i contain cool \xfcml\xe4uts.txt')
+    'i_contain_cool_umlauts.txt'
+
+    The function might return an empty filename.  It's your responsibility
+    to ensure that the filename is unique and that you abort or
+    generate a random filename if the function returned an empty one.
+
+    .. versionadded:: 0.5
+
+    :param filename: the filename to secure
+    """
+    filename = unicodedata.normalize("NFKD", filename)
+    filename = filename.encode("ascii", "ignore").decode("ascii")
+
+    for sep in os.path.sep, os.path.altsep:
+        if sep:
+            filename = filename.replace(sep, " ")
+    filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip(
+        "._"
+    )
+
+    # On Windows (os.name == "nt") a couple of special device files are
+    # present in each folder.  We have to ensure that the target file is
+    # not such a filename; in that case we prepend an underscore.
+    if (
+        os.name == "nt"
+        and filename
+        and filename.split(".")[0].upper() in _windows_device_files
+    ):
+        filename = f"_{filename}"
+
+    return filename
+
+
+def escape(s: t.Any) -> str:
+    """Replace ``&``, ``<``, ``>``, ``"``, and ``'`` with HTML-safe
+    sequences.
+
+    ``None`` is escaped to an empty string.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use MarkupSafe instead.
+    """
+    import html
+
+    warnings.warn(
+        "'utils.escape' is deprecated and will be removed in Werkzeug"
+        " 2.1. Use MarkupSafe instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
+    if s is None:
+        return ""
+
+    if hasattr(s, "__html__"):
+        return s.__html__()  # type: ignore
+
+    if not isinstance(s, str):
+        s = str(s)
+
+    return html.escape(s, quote=True)  # type: ignore
+
+
+def unescape(s: str) -> str:
+    """The reverse of :func:`escape`. This unescapes all the HTML
+    entities, not only those inserted by ``escape``.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use MarkupSafe instead.
+    """
+    import html
+
+    warnings.warn(
+        "'utils.unescape' is deprecated and will be removed in Werkzueg"
+        " 2.1. Use MarkupSafe instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return html.unescape(s)
+
+
+def redirect(
+    location: str, code: int = 302, Response: t.Optional[t.Type["Response"]] = None
+) -> "Response":
+    """Returns a response object (a WSGI application) that, if called,
+    redirects the client to the target location. Supported codes are
+    301, 302, 303, 305, 307, and 308. 300 is not supported because
+    it's not a real redirect and 304 because it's the answer for a
+    request with defined If-Modified-Since headers.
+
+    .. versionadded:: 0.6
+       The location can now be a unicode string that is encoded using
+       the :func:`iri_to_uri` function.
+
+    .. versionadded:: 0.10
+        The class used for the Response object can now be passed in.
+
+    :param location: the location the response should redirect to.
+    :param code: the redirect status code. Defaults to 302.
+    :param class Response: a Response class to use when instantiating a
+        response. The default is :class:`werkzeug.wrappers.Response` if
+        unspecified.
+    """
+    import html
+
+    if Response is None:
+        from .wrappers import Response  # type: ignore
+
+    display_location = html.escape(location)
+    if isinstance(location, str):
+        # Safe conversion is necessary here as we might redirect
+        # to a broken URI scheme (for instance itms-services).
+        from .urls import iri_to_uri
+
+        location = iri_to_uri(location, safe_conversion=True)
+    response = Response(  # type: ignore
+        '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
+        "<title>Redirecting...</title>\n"
+        "<h1>Redirecting...</h1>\n"
+        "<p>You should be redirected automatically to target URL: "
+        f'<a href="{html.escape(location)}">{display_location}</a>. If'
+        " not click the link.",
+        code,
+        mimetype="text/html",
+    )
+    response.headers["Location"] = location
+    return response
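+
+
+# Editor's usage sketch (not part of Werkzeug): the returned response is
+# itself a WSGI application, so it can be returned from one directly.
+# The URL below is a placeholder.
+#
+#     def app(environ, start_response):
+#         response = redirect("https://example.com/new-location", code=301)
+#         return response(environ, start_response)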
+
+
+def append_slash_redirect(environ: "WSGIEnvironment", code: int = 301) -> "Response":
+    """Redirects to the same URL but with a slash appended.  The behavior
+    of this function is undefined if the path ends with a slash already.
+
+    :param environ: the WSGI environment for the request that triggers
+                    the redirect.
+    :param code: the status code for the redirect.
+    """
+    new_path = environ["PATH_INFO"].strip("/") + "/"
+    query_string = environ.get("QUERY_STRING")
+    if query_string:
+        new_path += f"?{query_string}"
+    return redirect(new_path, code)
+
+
+def send_file(
+    path_or_file: t.Union[os.PathLike, str, t.BinaryIO],
+    environ: "WSGIEnvironment",
+    mimetype: t.Optional[str] = None,
+    as_attachment: bool = False,
+    download_name: t.Optional[str] = None,
+    conditional: bool = True,
+    etag: t.Union[bool, str] = True,
+    last_modified: t.Optional[t.Union[datetime, int, float]] = None,
+    max_age: t.Optional[
+        t.Union[int, t.Callable[[t.Optional[str]], t.Optional[int]]]
+    ] = None,
+    use_x_sendfile: bool = False,
+    response_class: t.Optional[t.Type["Response"]] = None,
+    _root_path: t.Optional[t.Union[os.PathLike, str]] = None,
+) -> "Response":
+    """Send the contents of a file to the client.
+
+    The first argument can be a file path or a file-like object. Paths
+    are preferred in most cases because Werkzeug can manage the file and
+    get extra information from the path. Passing a file-like object
+    requires that the file is opened in binary mode, and is mostly
+    useful when building a file in memory with :class:`io.BytesIO`.
+
+    Never pass file paths provided by a user. The path is assumed to be
+    trusted, so a user could craft a path to access a file you didn't
+    intend.
+
+    If the WSGI server sets a ``file_wrapper`` in ``environ``, it is
+    used, otherwise Werkzeug's built-in wrapper is used. Alternatively,
+    if the HTTP server supports ``X-Sendfile``, ``use_x_sendfile=True``
+    will tell the server to send the given path, which is much more
+    efficient than reading it in Python.
+
+    :param path_or_file: The path to the file to send, relative to the
+        current working directory if a relative path is given.
+        Alternatively, a file-like object opened in binary mode. Make
+        sure the file pointer is seeked to the start of the data.
+    :param environ: The WSGI environ for the current request.
+    :param mimetype: The MIME type to send for the file. If not
+        provided, it will try to detect it from the file name.
+    :param as_attachment: Indicate to a browser that it should offer to
+        save the file instead of displaying it.
+    :param download_name: The default name browsers will use when saving
+        the file. Defaults to the passed file name.
+    :param conditional: Enable conditional and range responses based on
+        request headers. Requires passing a file path and ``environ``.
+    :param etag: Calculate an ETag for the file, which requires passing
+        a file path. Can also be a string to use instead.
+    :param last_modified: The last modified time to send for the file,
+        in seconds. If not provided, it will try to detect it from the
+        file path.
+    :param max_age: How long the client should cache the file, in
+        seconds. If set, ``Cache-Control`` will be ``public``, otherwise
+        it will be ``no-cache`` to prefer conditional caching.
+    :param use_x_sendfile: Set the ``X-Sendfile`` header to let the
+        server efficiently send the file. Requires support from the
+        HTTP server. Requires passing a file path.
+    :param response_class: Build the response using this class. Defaults
+        to :class:`~werkzeug.wrappers.Response`.
+    :param _root_path: Do not use. For internal use only. Use
+        :func:`send_from_directory` to safely send files under a path.
+
+    .. versionadded:: 2.0
+        Adapted from Flask's implementation.
+
+    .. versionchanged:: 2.0
+        ``download_name`` replaces Flask's ``attachment_filename``
+        parameter. If ``as_attachment=False``, it is passed with
+        ``Content-Disposition: inline`` instead.
+
+    .. versionchanged:: 2.0
+        ``max_age`` replaces Flask's ``cache_timeout`` parameter.
+        ``conditional`` is enabled and ``max_age`` is not set by
+        default.
+
+    .. versionchanged:: 2.0
+        ``etag`` replaces Flask's ``add_etags`` parameter. It can be a
+        string to use instead of generating one.
+
+    .. versionchanged:: 2.0
+        If an encoding is returned when guessing ``mimetype`` from
+        ``download_name``, set the ``Content-Encoding`` header.
+    """
+    if response_class is None:
+        from .wrappers import Response
+
+        response_class = Response
+
+    path: t.Optional[str] = None
+    file: t.Optional[t.BinaryIO] = None
+    size: t.Optional[int] = None
+    mtime: t.Optional[float] = None
+    headers = Headers()
+
+    if isinstance(path_or_file, (os.PathLike, str)) or hasattr(
+        path_or_file, "__fspath__"
+    ):
+        path_or_file = t.cast(t.Union[os.PathLike, str], path_or_file)
+
+        # Flask will pass app.root_path, allowing its send_file wrapper
+        # to not have to deal with paths.
+        if _root_path is not None:
+            path = os.path.join(_root_path, path_or_file)
+        else:
+            path = os.path.abspath(path_or_file)
+
+        stat = os.stat(path)
+        size = stat.st_size
+        mtime = stat.st_mtime
+    else:
+        file = path_or_file
+
+    if download_name is None and path is not None:
+        download_name = os.path.basename(path)
+
+    if mimetype is None:
+        if download_name is None:
+            raise TypeError(
+                "Unable to detect the MIME type because a file name is"
+                " not available. Either set 'download_name', pass a"
+                " path instead of a file, or set 'mimetype'."
+            )
+
+        mimetype, encoding = mimetypes.guess_type(download_name)
+
+        if mimetype is None:
+            mimetype = "application/octet-stream"
+
+        if encoding is not None:
+            headers.set("Content-Encoding", encoding)
+
+    if download_name is not None:
+        try:
+            download_name.encode("ascii")
+        except UnicodeEncodeError:
+            simple = unicodedata.normalize("NFKD", download_name)
+            simple = simple.encode("ascii", "ignore").decode("ascii")
+            quoted = url_quote(download_name, safe="")
+            names = {"filename": simple, "filename*": f"UTF-8''{quoted}"}
+        else:
+            names = {"filename": download_name}
+
+        value = "attachment" if as_attachment else "inline"
+        headers.set("Content-Disposition", value, **names)
+    elif as_attachment:
+        raise TypeError(
+            "No name provided for attachment. Either set"
+            " 'download_name' or pass a path instead of a file."
+        )
+
+    if use_x_sendfile and path is not None:
+        headers["X-Sendfile"] = path
+        data = None
+    else:
+        if file is None:
+            file = open(path, "rb")  # type: ignore
+        elif isinstance(file, io.BytesIO):
+            size = file.getbuffer().nbytes
+        elif isinstance(file, io.TextIOBase):
+            raise ValueError("Files must be opened in binary mode or use BytesIO.")
+
+        data = wrap_file(environ, file)
+
+    rv = response_class(
+        data, mimetype=mimetype, headers=headers, direct_passthrough=True
+    )
+
+    if size is not None:
+        rv.content_length = size
+
+    if last_modified is not None:
+        rv.last_modified = last_modified  # type: ignore
+    elif mtime is not None:
+        rv.last_modified = mtime  # type: ignore
+
+    rv.cache_control.no_cache = True
+
+    # Flask will pass app.get_send_file_max_age, allowing its send_file
+    # wrapper to not have to deal with paths.
+    if callable(max_age):
+        max_age = max_age(path)
+
+    if max_age is not None:
+        if max_age > 0:
+            rv.cache_control.no_cache = None
+            rv.cache_control.public = True
+
+        rv.cache_control.max_age = max_age
+        rv.expires = int(time() + max_age)  # type: ignore
+
+    if isinstance(etag, str):
+        rv.set_etag(etag)
+    elif etag and path is not None:
+        check = adler32(path.encode("utf-8")) & 0xFFFFFFFF
+        rv.set_etag(f"{mtime}-{size}-{check}")
+
+    if conditional:
+        try:
+            rv = rv.make_conditional(environ, accept_ranges=True, complete_length=size)
+        except RequestedRangeNotSatisfiable:
+            if file is not None:
+                file.close()
+
+            raise
+
+        # Some x-sendfile implementations incorrectly ignore the 304
+        # status code and send the file anyway.
+        if rv.status_code == 304:
+            rv.headers.pop("x-sendfile", None)
+
+    return rv
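+
+
+# Editor's usage sketch (not part of Werkzeug): serving a fixed,
+# trusted path from a WSGI app.  "static/report.pdf" is a placeholder;
+# never pass user-supplied paths here (see the docstring above).
+#
+#     def app(environ, start_response):
+#         rv = send_file("static/report.pdf", environ, as_attachment=True)
+#         return rv(environ, start_response)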
+
+
+def send_from_directory(
+    directory: t.Union[os.PathLike, str],
+    path: t.Union[os.PathLike, str],
+    environ: "WSGIEnvironment",
+    **kwargs: t.Any,
+) -> "Response":
+    """Send a file from within a directory using :func:`send_file`.
+
+    This is a secure way to serve files from a folder, such as static
+    files or uploads. Uses :func:`~werkzeug.security.safe_join` to
+    ensure the path coming from the client is not maliciously crafted to
+    point outside the specified directory.
+
+    If the final path does not point to an existing regular file,
+    returns a 404 :exc:`~werkzeug.exceptions.NotFound` error.
+
+    :param directory: The directory that ``path`` must be located under.
+    :param path: The path to the file to send, relative to
+        ``directory``.
+    :param environ: The WSGI environ for the current request.
+    :param kwargs: Arguments to pass to :func:`send_file`.
+
+    .. versionadded:: 2.0
+        Adapted from Flask's implementation.
+    """
+    path = safe_join(os.fspath(directory), os.fspath(path))
+
+    if path is None:
+        raise NotFound()
+
+    # Flask will pass app.root_path, allowing its send_from_directory
+    # wrapper to not have to deal with paths.
+    if "_root_path" in kwargs:
+        path = os.path.join(kwargs["_root_path"], path)
+
+    try:
+        if not os.path.isfile(path):
+            raise NotFound()
+    except ValueError:
+        # path contains null byte on Python < 3.8
+        raise NotFound()
+
+    return send_file(path, environ, **kwargs)
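+
+
+# Editor's usage sketch (not part of Werkzeug): unlike send_file, a
+# client-supplied filename is safe here because it is joined with
+# safe_join.  "uploads" and the filename are placeholders.
+#
+#     def app(environ, start_response):
+#         rv = send_from_directory("uploads", "report.pdf", environ)
+#         return rv(environ, start_response)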
+
+
+def import_string(import_name: str, silent: bool = False) -> t.Any:
+    """Imports an object based on a string.  This is useful if you want to
+    use import paths as endpoints or something similar.  An import path can
+    be specified either in dotted notation (``xml.sax.saxutils.escape``)
+    or with a colon as object delimiter (``xml.sax.saxutils:escape``).
+
+    If `silent` is True the return value will be `None` if the import fails.
+
+    :param import_name: the dotted name for the object to import.
+    :param silent: if set to `True` import errors are ignored and
+                   `None` is returned instead.
+    :return: imported object
+    """
+    import_name = import_name.replace(":", ".")
+    try:
+        try:
+            __import__(import_name)
+        except ImportError:
+            if "." not in import_name:
+                raise
+        else:
+            return sys.modules[import_name]
+
+        module_name, obj_name = import_name.rsplit(".", 1)
+        module = __import__(module_name, globals(), locals(), [obj_name])
+        try:
+            return getattr(module, obj_name)
+        except AttributeError as e:
+            raise ImportError(e)
+
+    except ImportError as e:
+        if not silent:
+            raise ImportStringError(import_name, e).with_traceback(sys.exc_info()[2])
+
+    return None
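+
+
+# Editor's usage sketch (not part of Werkzeug), using the two notations
+# from the docstring above:
+#
+#     import_string("xml.sax.saxutils.escape")     # dotted notation
+#     import_string("xml.sax.saxutils:escape")     # colon notation
+#     import_string("no.such.module", silent=True) # -> None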
+
+
+def find_modules(
+    import_path: str, include_packages: bool = False, recursive: bool = False
+) -> t.Iterator[str]:
+    """Finds all the modules below a package.  This can be useful to
+    automatically import all views / controllers so that their metaclasses /
+    function decorators have a chance to register themselves on the
+    application.
+
+    Packages are not returned unless `include_packages` is `True`.  This can
+    also recursively list modules but in that case it will import all the
+    packages to get the correct load path of that module.
+
+    :param import_path: the dotted name for the package to find child modules.
+    :param include_packages: set to `True` if packages should be returned, too.
+    :param recursive: set to `True` if recursion should happen.
+    :return: generator
+    """
+    module = import_string(import_path)
+    path = getattr(module, "__path__", None)
+    if path is None:
+        raise ValueError(f"{import_path!r} is not a package")
+    basename = f"{module.__name__}."
+    for _importer, modname, ispkg in pkgutil.iter_modules(path):
+        modname = basename + modname
+        if ispkg:
+            if include_packages:
+                yield modname
+            if recursive:
+                yield from find_modules(modname, include_packages, True)
+        else:
+            yield modname
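+
+
+# Editor's usage sketch (not part of Werkzeug): listing the modules of a
+# stdlib package (output as on CPython 3.7):
+#
+#     sorted(find_modules("json"))
+#     # -> ['json.decoder', 'json.encoder', 'json.scanner', 'json.tool']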
+
+
+def validate_arguments(func, args, kwargs, drop_extra=True):  # type: ignore
+    """Checks if the function accepts the arguments and keyword arguments.
+    Returns a new ``(args, kwargs)`` tuple that can safely be passed to
+    the function without causing a `TypeError` because the function signature
+    is incompatible.  If `drop_extra` is set to `True` (which is the default)
+    any extra positional or keyword arguments are dropped automatically.
+
+    The exception raised provides three attributes:
+
+    `missing`
+        A set of argument names that the function expected but were
+        missing.
+
+    `extra`
+        A dict of keyword arguments that the function cannot handle but
+        were provided.
+
+    `extra_positional`
+        A list of values that were given by positional argument but the
+        function cannot accept.
+
+    This can be useful for decorators that forward user submitted data to
+    a view function::
+
+        from werkzeug.utils import ArgumentValidationError, validate_arguments
+
+        def sanitize(f):
+            def proxy(request):
+                data = request.values.to_dict()
+                try:
+                    args, kwargs = validate_arguments(f, (request,), data)
+                except ArgumentValidationError:
+                    raise BadRequest('The browser failed to transmit all '
+                                     'the data expected.')
+                return f(*args, **kwargs)
+            return proxy
+
+    :param func: the function the validation is performed against.
+    :param args: a tuple of positional arguments.
+    :param kwargs: a dict of keyword arguments.
+    :param drop_extra: set to `False` if you don't want extra arguments
+                       to be silently dropped.
+    :return: tuple in the form ``(args, kwargs)``.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use :func:`inspect.signature`
+        instead.
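+
+    A rough stdlib equivalent (a sketch, not a drop-in replacement; it
+    raises :exc:`TypeError` instead of collecting details)::
+
+        import inspect
+
+        bound = inspect.signature(func).bind(*args, **kwargs)
+        bound.apply_defaults()
+        args, kwargs = bound.args, bound.kwargs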
+    """
+    warnings.warn(
+        "'utils.validate_arguments' is deprecated and will be removed"
+        " in Werkzeug 2.1. Use 'inspect.signature' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    parser = _parse_signature(func)
+    args, kwargs, missing, extra, extra_positional = parser(args, kwargs)[:5]
+    if missing:
+        raise ArgumentValidationError(tuple(missing))
+    elif (extra or extra_positional) and not drop_extra:
+        raise ArgumentValidationError(None, extra, extra_positional)
+    return tuple(args), kwargs
+
+
+def bind_arguments(func, args, kwargs):  # type: ignore
+    """Bind the arguments provided into a dict.  When passed a function,
+    a tuple of arguments and a dict of keyword arguments `bind_arguments`
+    returns a dict of names as the function would see it.  This can be useful
+    to implement a cache decorator that uses the function arguments to build
+    the cache key based on the values of the arguments.
+
+    :param func: the function the arguments should be bound for.
+    :param args: tuple of positional arguments.
+    :param kwargs: a dict of keyword arguments.
+    :return: a :class:`dict` of bound keyword arguments.
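+
+    For example, a cache decorator could build its key from the bound
+    values (a minimal sketch that assumes all argument values are
+    hashable)::
+
+        def cached(f):
+            cache = {}
+            def wrapper(*args, **kwargs):
+                key = tuple(sorted(bind_arguments(f, args, kwargs).items()))
+                if key not in cache:
+                    cache[key] = f(*args, **kwargs)
+                return cache[key]
+            return wrapper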
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Use :meth:`Signature.bind`
+        instead.
+    """
+    warnings.warn(
+        "'utils.bind_arguments' is deprecated and will be removed in"
+        " Werkzeug 2.1. Use 'Signature.bind' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    (
+        args,
+        kwargs,
+        missing,
+        extra,
+        extra_positional,
+        arg_spec,
+        vararg_var,
+        kwarg_var,
+    ) = _parse_signature(func)(args, kwargs)
+    values = {}
+    for (name, _has_default, _default), value in zip(arg_spec, args):
+        values[name] = value
+    if vararg_var is not None:
+        values[vararg_var] = tuple(extra_positional)
+    elif extra_positional:
+        raise TypeError("too many positional arguments")
+    if kwarg_var is not None:
+        multikw = set(extra) & {x[0] for x in arg_spec}
+        if multikw:
+            raise TypeError(
+                f"got multiple values for keyword argument {next(iter(multikw))!r}"
+            )
+        values[kwarg_var] = extra
+    elif extra:
+        raise TypeError(f"got unexpected keyword argument {next(iter(extra))!r}")
+    return values
+
+
+class ArgumentValidationError(ValueError):
+    """Raised if :func:`validate_arguments` fails to validate
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1 along with ``utils.bind`` and
+        ``validate_arguments``.
+    """
+
+    def __init__(self, missing=None, extra=None, extra_positional=None):  # type: ignore
+        self.missing = set(missing or ())
+        self.extra = extra or {}
+        self.extra_positional = extra_positional or []
+        super().__init__(
+            "function arguments invalid."
+            f" ({len(self.missing)} missing,"
+            f" {len(self.extra) + len(self.extra_positional)} additional)"
+        )
+
+
+class ImportStringError(ImportError):
+    """Provides information about a failed :func:`import_string` attempt."""
+
+    #: String in dotted notation that failed to be imported.
+    import_name: str
+    #: Wrapped exception.
+    exception: BaseException
+
+    def __init__(self, import_name: str, exception: BaseException) -> None:
+        self.import_name = import_name
+        self.exception = exception
+        msg = import_name
+        name = ""
+        tracked = []
+        for part in import_name.replace(":", ".").split("."):
+            name = f"{name}.{part}" if name else part
+            imported = import_string(name, silent=True)
+            if imported:
+                tracked.append((name, getattr(imported, "__file__", None)))
+            else:
+                track = [f"- {n!r} found in {i!r}." for n, i in tracked]
+                track.append(f"- {name!r} not found.")
+                track_str = "\n".join(track)
+                msg = (
+                    f"import_string() failed for {import_name!r}. Possible reasons"
+                    f" are:\n\n"
+                    "- missing __init__.py in a package;\n"
+                    "- package or module path not included in sys.path;\n"
+                    "- duplicated package or module name taking precedence in"
+                    " sys.path;\n"
+                    "- missing module, class, function or variable;\n\n"
+                    f"Debugged import:\n\n{track_str}\n\n"
+                    f"Original exception:\n\n{type(exception).__name__}: {exception}"
+                )
+                break
+
+        super().__init__(msg)
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__}({self.import_name!r}, {self.exception!r})>"
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/__init__.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/__init__.py
new file mode 100644
index 00000000..eb69a994
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/__init__.py
@@ -0,0 +1,16 @@
+from .accept import AcceptMixin
+from .auth import AuthorizationMixin
+from .auth import WWWAuthenticateMixin
+from .base_request import BaseRequest
+from .base_response import BaseResponse
+from .common_descriptors import CommonRequestDescriptorsMixin
+from .common_descriptors import CommonResponseDescriptorsMixin
+from .etag import ETagRequestMixin
+from .etag import ETagResponseMixin
+from .request import PlainRequest
+from .request import Request as Request
+from .request import StreamOnlyMixin
+from .response import Response as Response
+from .response import ResponseStream
+from .response import ResponseStreamMixin
+from .user_agent import UserAgentMixin
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/accept.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/accept.py
new file mode 100644
index 00000000..9605e637
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/accept.py
@@ -0,0 +1,14 @@
+import typing as t
+import warnings
+
+
+class AcceptMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'AcceptMixin' is deprecated and will be removed in"
+            " Werkzeug 2.1. 'Request' now includes the functionality"
+            " directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/auth.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/auth.py
new file mode 100644
index 00000000..da31b7cf
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/auth.py
@@ -0,0 +1,26 @@
+import typing as t
+import warnings
+
+
+class AuthorizationMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'AuthorizationMixin' is deprecated and will be removed in"
+            " Werkzeug 2.1. 'Request' now includes the functionality"
+            " directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
+
+
+class WWWAuthenticateMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'WWWAuthenticateMixin' is deprecated and will be removed"
+            " in Werkzeug 2.1. 'Response' now includes the"
+            " functionality directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/base_request.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/base_request.py
new file mode 100644
index 00000000..451989fd
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/base_request.py
@@ -0,0 +1,36 @@
+import typing as t
+import warnings
+
+from .request import Request
+
+
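+# Metaclass for the deprecated ``BaseRequest`` alias below. It intercepts
+# ``issubclass()`` / ``isinstance()`` checks so that code testing against
+# ``BaseRequest`` still matches the new ``Request`` class, while emitting
+# a DeprecationWarning.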
+class _FakeSubclassCheck(type):
+    def __subclasscheck__(cls, subclass: t.Type) -> bool:
+        warnings.warn(
+            "'BaseRequest' is deprecated and will be removed in"
+            " Werkzeug 2.1. Use 'issubclass(cls, Request)' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return issubclass(subclass, Request)
+
+    def __instancecheck__(cls, instance: t.Any) -> bool:
+        warnings.warn(
+            "'BaseRequest' is deprecated and will be removed in"
+            " Werkzeug 2.1. Use 'isinstance(obj, Request)' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return isinstance(instance, Request)
+
+
+class BaseRequest(Request, metaclass=_FakeSubclassCheck):
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'BaseRequest' is deprecated and will be removed in"
+            " Werkzeug 2.1. 'Request' now includes the functionality"
+            " directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/base_response.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/base_response.py
new file mode 100644
index 00000000..3e0dc676
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/base_response.py
@@ -0,0 +1,36 @@
+import typing as t
+import warnings
+
+from .response import Response
+
+
+class _FakeSubclassCheck(type):
+    def __subclasscheck__(cls, subclass: t.Type) -> bool:
+        warnings.warn(
+            "'BaseResponse' is deprecated and will be removed in"
+            " Werkzeug 2.1. Use 'issubclass(cls, Response)' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return issubclass(subclass, Response)
+
+    def __instancecheck__(cls, instance: t.Any) -> bool:
+        warnings.warn(
+            "'BaseResponse' is deprecated and will be removed in"
+            " Werkzeug 2.1. Use 'isinstance(obj, Response)' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return isinstance(instance, Response)
+
+
+class BaseResponse(Response, metaclass=_FakeSubclassCheck):
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'BaseResponse' is deprecated and will be removed in"
+            " Werkzeug 2.1. 'Response' now includes the functionality"
+            " directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/common_descriptors.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/common_descriptors.py
new file mode 100644
index 00000000..db87ea5f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/common_descriptors.py
@@ -0,0 +1,26 @@
+import typing as t
+import warnings
+
+
+class CommonRequestDescriptorsMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'CommonRequestDescriptorsMixin' is deprecated and will be"
+            " removed in Werkzeug 2.1. 'Request' now includes the"
+            " functionality directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
+
+
+class CommonResponseDescriptorsMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'CommonResponseDescriptorsMixin' is deprecated and will be"
+            " removed in Werkzeug 2.1. 'Response' now includes the"
+            " functionality directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/cors.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/cors.py
new file mode 100644
index 00000000..89cf83ef
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/cors.py
@@ -0,0 +1,26 @@
+import typing as t
+import warnings
+
+
+class CORSRequestMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'CORSRequestMixin' is deprecated and will be removed in"
+            " Werkzeug 2.1. 'Request' now includes the functionality"
+            " directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
+
+
+class CORSResponseMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'CORSResponseMixin' is deprecated and will be removed in"
+            " Werkzeug 2.1. 'Response' now includes the functionality"
+            " directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/etag.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/etag.py
new file mode 100644
index 00000000..2e9015a5
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/etag.py
@@ -0,0 +1,26 @@
+import typing as t
+import warnings
+
+
+class ETagRequestMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'ETagRequestMixin' is deprecated and will be removed in"
+            " Werkzeug 2.1. 'Request' now includes the functionality"
+            " directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
+
+
+class ETagResponseMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'ETagResponseMixin' is deprecated and will be removed in"
+            " Werkzeug 2.1. 'Response' now includes the functionality"
+            " directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/json.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/json.py
new file mode 100644
index 00000000..ab6ed7ba
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/json.py
@@ -0,0 +1,13 @@
+import typing as t
+import warnings
+
+
+class JSONMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'JSONMixin' is deprecated and will be removed in Werkzeug"
+            " 2.1. 'Request' now includes the functionality directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/request.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/request.py
new file mode 100644
index 00000000..60c3b5f4
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/request.py
@@ -0,0 +1,660 @@
+import functools
+import json
+import typing
+import typing as t
+import warnings
+from io import BytesIO
+
+from .._internal import _wsgi_decoding_dance
+from ..datastructures import CombinedMultiDict
+from ..datastructures import EnvironHeaders
+from ..datastructures import FileStorage
+from ..datastructures import ImmutableMultiDict
+from ..datastructures import iter_multi_items
+from ..datastructures import MultiDict
+from ..formparser import default_stream_factory
+from ..formparser import FormDataParser
+from ..sansio.request import Request as _SansIORequest
+from ..utils import cached_property
+from ..utils import environ_property
+from ..wsgi import _get_server
+from ..wsgi import get_input_stream
+from werkzeug.exceptions import BadRequest
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+class Request(_SansIORequest):
+    """Represents an incoming WSGI HTTP request, with headers and body
+    taken from the WSGI environment. Has properties and methods for
+    using the functionality defined by various HTTP specs. The data in
+    the request object is read-only.
+
+    Text data is assumed to use UTF-8 encoding, which should be true for
+    the vast majority of modern clients. Using an encoding set by the
+    client is unsafe in Python due to extra encodings it provides, such
+    as ``zip``. To change the assumed encoding, subclass and replace
+    :attr:`charset`.
+
+    :param environ: The WSGI environ is generated by the WSGI server and
+        contains information about the server configuration and client
+        request.
+    :param populate_request: Add this request object to the WSGI environ
+        as ``environ['werkzeug.request']``. Can be useful when
+        debugging.
+    :param shallow: Makes reading from :attr:`stream` (and any method
+        that would read from it) raise a :exc:`RuntimeError`. Useful to
+        prevent consuming the form data in middleware, which would make
+        it unavailable to the final application.
+
+    .. versionchanged:: 2.0
+        Combine ``BaseRequest`` and mixins into a single ``Request``
+        class. Using the old classes is deprecated and will be removed
+        in Werkzeug 2.1.
+
+    .. versionchanged:: 0.5
+        Read-only mode is enforced with immutable classes for all data.
+    """
+
+    #: the maximum content length.  This is forwarded to the form data
+    #: parsing function (:func:`parse_form_data`).  When set, and the
+    #: :attr:`form` or :attr:`files` attribute is accessed, and parsing
+    #: fails because more than the specified value is transmitted,
+    #: a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised.
+    #:
+    #: Have a look at :doc:`/request_data` for more details.
+    #:
+    #: .. versionadded:: 0.5
+    max_content_length: t.Optional[int] = None
+
+    #: the maximum form field size.  This is forwarded to the form data
+    #: parsing function (:func:`parse_form_data`).  When set, and the
+    #: :attr:`form` or :attr:`files` attribute is accessed, and the
+    #: in-memory post data is larger than the specified value,
+    #: a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised.
+    #:
+    #: Have a look at :doc:`/request_data` for more details.
+    #:
+    #: .. versionadded:: 0.5
+    max_form_memory_size: t.Optional[int] = None
+
+    #: The form data parser that should be used.  Can be replaced to customize
+    #: the form data parsing.
+    form_data_parser_class: t.Type[FormDataParser] = FormDataParser
+
+    #: Disable the :attr:`data` property to avoid reading from the input
+    #: stream.
+    #:
+    #: .. deprecated:: 2.0
+    #:     Will be removed in Werkzeug 2.1. Create the request with
+    #:     ``shallow=True`` instead.
+    #:
+    #: .. versionadded:: 0.9
+    disable_data_descriptor: t.Optional[bool] = None
+
+    #: The WSGI environment containing HTTP headers and information from
+    #: the WSGI server.
+    environ: "WSGIEnvironment"
+
+    #: Set when creating the request object. If ``True``, reading from
+    #: the request body will raise a ``RuntimeError``. Useful to
+    #: prevent middleware from consuming the stream.
+    shallow: bool
+
+    def __init__(
+        self,
+        environ: "WSGIEnvironment",
+        populate_request: bool = True,
+        shallow: bool = False,
+    ) -> None:
+        super().__init__(
+            method=environ.get("REQUEST_METHOD", "GET"),
+            scheme=environ.get("wsgi.url_scheme", "http"),
+            server=_get_server(environ),
+            root_path=_wsgi_decoding_dance(
+                environ.get("SCRIPT_NAME") or "", self.charset, self.encoding_errors
+            ),
+            path=_wsgi_decoding_dance(
+                environ.get("PATH_INFO") or "", self.charset, self.encoding_errors
+            ),
+            query_string=environ.get("QUERY_STRING", "").encode("latin1"),
+            headers=EnvironHeaders(environ),
+            remote_addr=environ.get("REMOTE_ADDR"),
+        )
+        self.environ = environ
+
+        if self.disable_data_descriptor is not None:
+            warnings.warn(
+                "'disable_data_descriptor' is deprecated and will be"
+                " removed in Werkzeug 2.1. Create the request with"
+                " 'shallow=True' instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            shallow = shallow or self.disable_data_descriptor
+
+        self.shallow = shallow
+
+        if populate_request and not shallow:
+            self.environ["werkzeug.request"] = self
+
+    @classmethod
+    def from_values(cls, *args: t.Any, **kwargs: t.Any) -> "Request":
+        """Create a new request object based on the values provided.  If
+        environ is given missing values are filled from there.  This method is
+        useful for small scripts when you need to simulate a request from an URL.
+        Do not use this method for unittesting, there is a full featured client
+        object (:class:`Client`) that allows to create multipart requests,
+        support for cookies etc.
+
+        This accepts the same options as the
+        :class:`~werkzeug.test.EnvironBuilder`.
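+
+        A short sketch (test values only)::
+
+            req = Request.from_values('/search?q=test', method='GET')
+            assert req.args['q'] == 'test'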
+
+        .. versionchanged:: 0.5
+           This method now accepts the same arguments as
+           :class:`~werkzeug.test.EnvironBuilder`.  Because of this the
+           `environ` parameter is now called `environ_overrides`.
+
+        :return: request object
+        """
+        from ..test import EnvironBuilder
+
+        charset = kwargs.pop("charset", cls.charset)
+        kwargs["charset"] = charset
+        builder = EnvironBuilder(*args, **kwargs)
+        try:
+            return builder.get_request(cls)
+        finally:
+            builder.close()
+
+    @classmethod
+    def application(
+        cls, f: t.Callable[["Request"], "WSGIApplication"]
+    ) -> "WSGIApplication":
+        """Decorate a function as responder that accepts the request as
+        the last argument.  This works like the :func:`responder`
+        decorator but the function is passed the request object as the
+        last argument and the request object will be closed
+        automatically::
+
+            @Request.application
+            def my_wsgi_app(request):
+                return Response('Hello World!')
+
+        As of Werkzeug 0.14 HTTP exceptions are automatically caught and
+        converted to responses instead of failing.
+
+        :param f: the WSGI callable to decorate
+        :return: a new WSGI callable
+        """
+        #: return a callable that wraps the -2nd argument with the request
+        #: and calls the function with all the arguments up to that one and
+        #: the request.  The return value is then called with the latest
+        #: two arguments.  This makes it possible to use this decorator for
+        #: both standalone WSGI functions as well as bound methods and
+        #: partially applied functions.
+        from ..exceptions import HTTPException
+
+        @functools.wraps(f)
+        def application(*args):  # type: ignore
+            request = cls(args[-2])
+            with request:
+                try:
+                    resp = f(*args[:-2] + (request,))
+                except HTTPException as e:
+                    resp = e.get_response(args[-2])
+                return resp(*args[-2:])
+
+        return t.cast("WSGIApplication", application)
+
+    def _get_file_stream(
+        self,
+        total_content_length: t.Optional[int],
+        content_type: t.Optional[str],
+        filename: t.Optional[str] = None,
+        content_length: t.Optional[int] = None,
+    ) -> t.BinaryIO:
+        """Called to get a stream for the file upload.
+
+        This must return a file-like object with `read()`, `readline()`
+        and `seek()` methods that is both writeable and readable.
+
+        The default implementation returns a temporary file if the total
+        content length is higher than 500KB.  Because many browsers do not
+        provide a content length for individual files, only the total content
+        length matters.
+
+        :param total_content_length: the total content length of all the
+                                     data in the request combined.  This value
+                                     is guaranteed to be there.
+        :param content_type: the mimetype of the uploaded file.
+        :param filename: the filename of the uploaded file.  May be `None`.
+        :param content_length: the length of this file.  This value is usually
+                               not provided because web browsers do not provide
+                               this value.
+        """
+        return default_stream_factory(
+            total_content_length=total_content_length,
+            filename=filename,
+            content_type=content_type,
+            content_length=content_length,
+        )
+
+    @property
+    def want_form_data_parsed(self) -> bool:
+        """``True`` if the request method carries content. By default
+        this is true if a ``Content-Type`` is sent.
+
+        .. versionadded:: 0.8
+        """
+        return bool(self.environ.get("CONTENT_TYPE"))
+
+    def make_form_data_parser(self) -> FormDataParser:
+        """Creates the form data parser. Instantiates the
+        :attr:`form_data_parser_class` with some parameters.
+
+        .. versionadded:: 0.8
+        """
+        return self.form_data_parser_class(
+            self._get_file_stream,
+            self.charset,
+            self.encoding_errors,
+            self.max_form_memory_size,
+            self.max_content_length,
+            self.parameter_storage_class,
+        )
+
+    def _load_form_data(self) -> None:
+        """Method used internally to retrieve submitted data.  After calling
+        this sets `form` and `files` on the request object to multi dicts
+        filled with the incoming form data.  Note that the input
+        stream will be empty afterwards.  You can also call this method to
+        force the parsing of the form data.
+
+        .. versionadded:: 0.8
+        """
+        # abort early if we have already consumed the stream
+        if "form" in self.__dict__:
+            return
+
+        if self.want_form_data_parsed:
+            parser = self.make_form_data_parser()
+            data = parser.parse(
+                self._get_stream_for_parsing(),
+                self.mimetype,
+                self.content_length,
+                self.mimetype_params,
+            )
+        else:
+            data = (
+                self.stream,
+                self.parameter_storage_class(),
+                self.parameter_storage_class(),
+            )
+
+        # inject the values into the instance dict so that we bypass
+        # our cached_property non-data descriptor.
+        d = self.__dict__
+        d["stream"], d["form"], d["files"] = data
+
+    def _get_stream_for_parsing(self) -> t.BinaryIO:
+        """This is the same as accessing :attr:`stream` with the difference
+        that if it finds cached data from calling :meth:`get_data` first it
+        will create a new stream out of the cached data.
+
+        .. versionadded:: 0.9.3
+        """
+        cached_data = getattr(self, "_cached_data", None)
+        if cached_data is not None:
+            return BytesIO(cached_data)
+        return self.stream
+
+    def close(self) -> None:
+        """Closes associated resources of this request object.  This
+        closes all file handles explicitly.  You can also use the request
+        object in a with statement which will automatically close it.
+
+        .. versionadded:: 0.9
+        """
+        files = self.__dict__.get("files")
+        for _key, value in iter_multi_items(files or ()):
+            value.close()
+
+    def __enter__(self) -> "Request":
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb) -> None:  # type: ignore
+        self.close()
+
+    @cached_property
+    def stream(self) -> t.BinaryIO:
+        """
+        If the incoming form data was not encoded with a known mimetype
+        the data is stored unmodified in this stream for consumption.  Most
+        of the time it is a better idea to use :attr:`data` which will give
+        you that data as bytes.  The stream only returns the data once.
+
+        Unlike :attr:`input_stream` this stream is properly guarded so that you
+        can't accidentally read past the length of the input.  Werkzeug will
+        internally always refer to this stream to read data which makes it
+        possible to wrap this object with a stream that does filtering.
+
+        .. versionchanged:: 0.9
+           This stream is now always available but might be consumed by the
+           form parser later on.  Previously the stream was only set if no
+           parsing happened.
+        """
+        if self.shallow:
+            raise RuntimeError(
+                "This request was created with 'shallow=True', reading"
+                " from the input stream is disabled."
+            )
+
+        return get_input_stream(self.environ)
+
+    input_stream = environ_property[t.BinaryIO](
+        "wsgi.input",
+        doc="""The WSGI input stream.
+
+        In general it's a bad idea to use this one because you can
+        easily read past the boundary.  Use the :attr:`stream`
+        instead.""",
+    )
+
+    @cached_property
+    def data(self) -> bytes:
+        """
+        Contains the incoming request data as bytes in case it came with
+        a mimetype Werkzeug does not handle.
+        """
+        return self.get_data(parse_form_data=True)
+
+    @typing.overload
+    def get_data(  # type: ignore
+        self,
+        cache: bool = True,
+        as_text: "te.Literal[False]" = False,
+        parse_form_data: bool = False,
+    ) -> bytes:
+        ...
+
+    @typing.overload
+    def get_data(
+        self,
+        cache: bool = True,
+        as_text: "te.Literal[True]" = ...,
+        parse_form_data: bool = False,
+    ) -> str:
+        ...
+
+    def get_data(
+        self, cache: bool = True, as_text: bool = False, parse_form_data: bool = False
+    ) -> t.Union[bytes, str]:
+        """This reads the buffered incoming data from the client into one
+        bytes object.  By default this is cached but that behavior can be
+        changed by setting `cache` to `False`.
+
+        Usually it's a bad idea to call this method without checking the
+        content length first as a client could send dozens of megabytes or more
+        to cause memory problems on the server.
+
+        Note that if the form data was already parsed this method will not
+        return anything as form data parsing does not cache the data like
+        this method does.  To implicitly invoke the form data parsing function
+        set `parse_form_data` to `True`.  When this is done the return value
+        of this method will be an empty string if the form parser handles
+        the data.  This is generally not necessary as, if the whole data is
+        cached (which is the default), the form parser will use the cached
+        data to parse the form data.  In any case, check the content length
+        first before calling this method to avoid exhausting server memory.
+
+        If `as_text` is set to `True` the return value will be a decoded
+        string.
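+
+        A cautious usage sketch (the 1 MB cap is an arbitrary example)::
+
+            max_len = 1024 * 1024
+            if request.content_length is not None and request.content_length <= max_len:
+                body = request.get_data(as_text=True)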
+
+        .. versionadded:: 0.9
+        """
+        rv = getattr(self, "_cached_data", None)
+        if rv is None:
+            if parse_form_data:
+                self._load_form_data()
+            rv = self.stream.read()
+            if cache:
+                self._cached_data = rv
+        if as_text:
+            rv = rv.decode(self.charset, self.encoding_errors)
+        return rv  # type: ignore
+
+    @cached_property
+    def form(self) -> "ImmutableMultiDict[str, str]":
+        """The form parameters.  By default an
+        :class:`~werkzeug.datastructures.ImmutableMultiDict`
+        is returned from this function.  This can be changed by setting
+        :attr:`parameter_storage_class` to a different type.  This might
+        be necessary if the order of the form data is important.
+
+        Please keep in mind that file uploads will not end up here, but instead
+        in the :attr:`files` attribute.
+
+        .. versionchanged:: 0.9
+
+            Previous to Werkzeug 0.9 this would only contain form data for POST
+            and PUT requests.
+        """
+        self._load_form_data()
+        return self.form
+
+    @cached_property
+    def values(self) -> "CombinedMultiDict[str, str]":
+        """A :class:`werkzeug.datastructures.CombinedMultiDict` that
+        combines :attr:`args` and :attr:`form`.
+
+        For GET requests, only ``args`` are present, not ``form``.
+
+        .. versionchanged:: 2.0
+            For GET requests, only ``args`` are present, not ``form``.
+        """
+        sources = [self.args]
+
+        if self.method != "GET":
+            # GET requests can have a body, and some caching proxies
+            # might not treat that differently than a normal GET
+            # request, allowing form data to "invisibly" affect the
+            # cache without indication in the query string / URL.
+            sources.append(self.form)
+
+        args = []
+
+        for d in sources:
+            if not isinstance(d, MultiDict):
+                d = MultiDict(d)
+
+            args.append(d)
+
+        return CombinedMultiDict(args)
+
+    @cached_property
+    def files(self) -> "ImmutableMultiDict[str, FileStorage]":
+        """:class:`~werkzeug.datastructures.MultiDict` object containing
+        all uploaded files.  Each key in :attr:`files` is the name from the
+        ``<input type="file" name="">``.  Each value in :attr:`files` is a
+        Werkzeug :class:`~werkzeug.datastructures.FileStorage` object.
+
+        It behaves like a standard Python file object,
+        with the difference that it also has a
+        :meth:`~werkzeug.datastructures.FileStorage.save` function that can
+        store the file on the filesystem.
+
+        Note that :attr:`files` will only contain data if the request method was
+        POST, PUT or PATCH and the ``<form>`` that posted to the request had
+        ``enctype="multipart/form-data"``.  It will be empty otherwise.
+
+        See the :class:`~werkzeug.datastructures.MultiDict` /
+        :class:`~werkzeug.datastructures.FileStorage` documentation for
+        more details about the used data structure.
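+
+        A short sketch (assumes a form field named ``"file"``)::
+
+            upload = request.files.get('file')
+            if upload is not None:
+                upload.save('/tmp/upload.bin')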
+        """
+        self._load_form_data()
+        return self.files
+
+    @property
+    def script_root(self) -> str:
+        """Alias for :attr:`self.root_path`. ``environ["SCRIPT_ROOT"]``
+        without a trailing slash.
+        """
+        return self.root_path
+
+    @cached_property
+    def url_root(self) -> str:
+        """Alias for :attr:`root_url`. The URL with scheme, host, and
+        root path. For example, ``https://example.com/app/``.
+        """
+        return self.root_url
+
+    remote_user = environ_property[str](
+        "REMOTE_USER",
+        doc="""If the server supports user authentication, and the
+        script is protected, this attribute contains the username the
+        user has authenticated as.""",
+    )
+    is_multithread = environ_property[bool](
+        "wsgi.multithread",
+        doc="""boolean that is `True` if the application is served by a
+        multithreaded WSGI server.""",
+    )
+    is_multiprocess = environ_property[bool](
+        "wsgi.multiprocess",
+        doc="""boolean that is `True` if the application is served by a
+        WSGI server that spawns multiple processes.""",
+    )
+    is_run_once = environ_property[bool](
+        "wsgi.run_once",
+        doc="""boolean that is `True` if the application will be
+        executed only once in a process lifetime.  This is the case for
+        CGI for example, but it's not guaranteed that the execution only
+        happens one time.""",
+    )
+
+    # JSON
+
+    #: A module or other object that has ``dumps`` and ``loads``
+    #: functions that match the API of the built-in :mod:`json` module.
+    json_module = json
+
+    @property
+    def json(self) -> t.Optional[t.Any]:
+        """The parsed JSON data if :attr:`mimetype` indicates JSON
+        (:mimetype:`application/json`, see :meth:`is_json`).
+
+        Calls :meth:`get_json` with default arguments.
+        """
+        return self.get_json()
+
+    # Cached values for ``(silent=False, silent=True)``. Initialized
+    # with sentinel values.
+    _cached_json: t.Tuple[t.Any, t.Any] = (Ellipsis, Ellipsis)
+
+    def get_json(
+        self, force: bool = False, silent: bool = False, cache: bool = True
+    ) -> t.Optional[t.Any]:
+        """Parse :attr:`data` as JSON.
+
+        If the mimetype does not indicate JSON
+        (:mimetype:`application/json`, see :meth:`is_json`), this
+        returns ``None``.
+
+        If parsing fails, :meth:`on_json_loading_failed` is called and
+        its return value is used as the return value.
+
+        :param force: Ignore the mimetype and always try to parse JSON.
+        :param silent: Silence parsing errors and return ``None``
+            instead.
+        :param cache: Store the parsed JSON to return for subsequent
+            calls.
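+
+        A short sketch using test values::
+
+            req = Request.from_values(
+                data='{"name": "test"}', content_type='application/json'
+            )
+            assert req.get_json() == {'name': 'test'}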
+        """
+        if cache and self._cached_json[silent] is not Ellipsis:
+            return self._cached_json[silent]
+
+        if not (force or self.is_json):
+            return None
+
+        data = self.get_data(cache=cache)
+
+        try:
+            rv = self.json_module.loads(data)
+        except ValueError as e:
+            if silent:
+                rv = None
+
+                if cache:
+                    normal_rv, _ = self._cached_json
+                    self._cached_json = (normal_rv, rv)
+            else:
+                rv = self.on_json_loading_failed(e)
+
+                if cache:
+                    _, silent_rv = self._cached_json
+                    self._cached_json = (rv, silent_rv)
+        else:
+            if cache:
+                self._cached_json = (rv, rv)
+
+        return rv
+
+    def on_json_loading_failed(self, e: ValueError) -> t.Any:
+        """Called if :meth:`get_json` parsing fails and isn't silenced.
+        If this method returns a value, it is used as the return value
+        for :meth:`get_json`. The default implementation raises
+        :exc:`~werkzeug.exceptions.BadRequest`.
+        """
+        raise BadRequest(f"Failed to decode JSON object: {e}")
+
+
+class StreamOnlyMixin:
+    """Mixin to create a ``Request`` that disables the ``data``,
+    ``form``, and ``files`` properties. Only ``stream`` is available.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Create the request with
+        ``shallow=True`` instead.
+
+    .. versionadded:: 0.9
+    """
+
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'StreamOnlyMixin' is deprecated and will be removed in"
+            " Werkzeug 2.1. Create the request with 'shallow=True'"
+            " instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        kwargs["shallow"] = True
+        super().__init__(*args, **kwargs)  # type: ignore
+
+
+class PlainRequest(StreamOnlyMixin, Request):
+    """A request object without ``data``, ``form``, and ``files``.
+
+    .. deprecated:: 2.0
+        Will be removed in Werkzeug 2.1. Create the request with
+        ``shallow=True`` instead.
+
+    .. versionadded:: 0.9
+    """
+
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'PlainRequest' is deprecated and will be removed in"
+            " Werkzeug 2.1. Create the request with 'shallow=True'"
+            " instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        # Don't show the DeprecationWarning for StreamOnlyMixin.
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore", DeprecationWarning)
+            super().__init__(*args, **kwargs)
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/response.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/response.py
new file mode 100644
index 00000000..a43c8bca
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/response.py
@@ -0,0 +1,890 @@
+import json
+import typing
+import typing as t
+import warnings
+from http import HTTPStatus
+
+from .._internal import _to_bytes
+from ..datastructures import Headers
+from ..http import remove_entity_headers
+from ..sansio.response import Response as _SansIOResponse
+from ..urls import iri_to_uri
+from ..urls import url_join
+from ..utils import cached_property
+from ..wsgi import ClosingIterator
+from ..wsgi import get_current_url
+from werkzeug._internal import _get_environ
+from werkzeug.http import generate_etag
+from werkzeug.http import http_date
+from werkzeug.http import is_resource_modified
+from werkzeug.http import parse_etags
+from werkzeug.http import parse_range_header
+from werkzeug.wsgi import _RangeWrapper
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from _typeshed.wsgi import StartResponse
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+def _warn_if_string(iterable: t.Iterable) -> None:
+    """Helper for the response objects to check if the iterable returned
+    to the WSGI server is not a string.
+    """
+    if isinstance(iterable, str):
+        warnings.warn(
+            "Response iterable was set to a string. This will appear to"
+            " work but means that the server will send the data to the"
+            " client one character at a time. This is almost never"
+            " intended behavior, use 'response.data' to assign strings"
+            " to the response object.",
+            stacklevel=2,
+        )
+
+
+def _iter_encoded(
+    iterable: t.Iterable[t.Union[str, bytes]], charset: str
+) -> t.Iterator[bytes]:
+    for item in iterable:
+        if isinstance(item, str):
+            yield item.encode(charset)
+        else:
+            yield item
+
+
+def _clean_accept_ranges(accept_ranges: t.Union[bool, str]) -> str:
+    if accept_ranges is True:
+        return "bytes"
+    elif accept_ranges is False:
+        return "none"
+    elif isinstance(accept_ranges, str):
+        return accept_ranges
+    raise ValueError("Invalid accept_ranges value")
+
+
+class Response(_SansIOResponse):
+    """Represents an outgoing WSGI HTTP response with body, status, and
+    headers. Has properties and methods for using the functionality
+    defined by various HTTP specs.
+
+    The response body is flexible to support different use cases. The
+    simple form is passing bytes, or a string which will be encoded as
+    UTF-8. Passing an iterable of bytes or strings makes this a
+    streaming response. A generator is particularly useful for building
+    a CSV file in memory or using SSE (Server Sent Events). A file-like
+    object is also iterable, although the
+    :func:`~werkzeug.utils.send_file` helper should be used in that
+    case.
+
+    The response object is itself a WSGI application callable. When
+    called (:meth:`__call__`) with ``environ`` and ``start_response``,
+    it will pass its status and headers to ``start_response`` then
+    return its body as an iterable.
+
+    .. code-block:: python
+
+        from werkzeug.wrappers.response import Response
+
+        def index():
+            return Response("Hello, World!")
+
+        def application(environ, start_response):
+            path = environ.get("PATH_INFO") or "/"
+
+            if path == "/":
+                response = index()
+            else:
+                response = Response("Not Found", status=404)
+
+            return response(environ, start_response)
+
+    :param response: The data for the body of the response. A string or
+        bytes, or tuple or list of strings or bytes, for a fixed-length
+        response, or any other iterable of strings or bytes for a
+        streaming response. Defaults to an empty body.
+    :param status: The status code for the response. Either an int, in
+        which case the default status message is added, or a string in
+        the form ``{code} {message}``, like ``404 Not Found``. Defaults
+        to 200.
+    :param headers: A :class:`~werkzeug.datastructures.Headers` object,
+        or a list of ``(key, value)`` tuples that will be converted to a
+        ``Headers`` object.
+    :param mimetype: The mime type (content type without charset or
+        other parameters) of the response. If the value starts with
+        ``text/`` (or matches some other special cases), the charset
+        will be added to create the ``content_type``.
+    :param content_type: The full content type of the response.
+        Overrides building the value from ``mimetype``.
+    :param direct_passthrough: Pass the response body directly through
+        as the WSGI iterable. This can be used when the body is a binary
+        file or other iterator of bytes, to skip some unnecessary
+        checks. Use :func:`~werkzeug.utils.send_file` instead of setting
+        this manually.
+
+    .. versionchanged:: 2.0
+        Combine ``BaseResponse`` and mixins into a single ``Response``
+        class. Using the old classes is deprecated and will be removed
+        in Werkzeug 2.1.
+
+    .. versionchanged:: 0.5
+        The ``direct_passthrough`` parameter was added.
+    """
+
+    #: if set to `False` accessing properties on the response object will
+    #: not try to consume the response iterator and convert it into a list.
+    #:
+    #: .. versionadded:: 0.6.2
+    #:
+    #:    That attribute was previously called `implicit_seqence_conversion`.
+    #:    (Notice the typo).  If you did use this feature, you have to adapt
+    #:    your code to the name change.
+    implicit_sequence_conversion = True
+
+    #: Should this response object correct the location header to be RFC
+    #: conformant?  This is true by default.
+    #:
+    #: .. versionadded:: 0.8
+    autocorrect_location_header = True
+
+    #: Should this response object automatically set the content-length
+    #: header if possible?  This is true by default.
+    #:
+    #: .. versionadded:: 0.8
+    automatically_set_content_length = True
+
+    #: The response body to send as the WSGI iterable. A list of strings
+    #: or bytes represents a fixed-length response, any other iterable
+    #: is a streaming response. Strings are encoded to bytes as UTF-8.
+    #:
+    #: Do not set to a plain string or bytes, that will cause sending
+    #: the response to be very inefficient as it will iterate one byte
+    #: at a time.
+    response: t.Union[t.Iterable[str], t.Iterable[bytes]]
+
+    def __init__(
+        self,
+        response: t.Optional[
+            t.Union[t.Iterable[bytes], bytes, t.Iterable[str], str]
+        ] = None,
+        status: t.Optional[t.Union[int, str, HTTPStatus]] = None,
+        headers: t.Optional[
+            t.Union[
+                t.Mapping[str, t.Union[str, int, t.Iterable[t.Union[str, int]]]],
+                t.Iterable[t.Tuple[str, t.Union[str, int]]],
+            ]
+        ] = None,
+        mimetype: t.Optional[str] = None,
+        content_type: t.Optional[str] = None,
+        direct_passthrough: bool = False,
+    ) -> None:
+        super().__init__(
+            status=status,
+            headers=headers,
+            mimetype=mimetype,
+            content_type=content_type,
+        )
+
+        #: Pass the response body directly through as the WSGI iterable.
+        #: This can be used when the body is a binary file or other
+        #: iterator of bytes, to skip some unnecessary checks. Use
+        #: :func:`~werkzeug.utils.send_file` instead of setting this
+        #: manually.
+        self.direct_passthrough = direct_passthrough
+        self._on_close: t.List[t.Callable[[], t.Any]] = []
+
+        # we set the response after the headers so that if a class changes
+        # the charset attribute, the data is set in the correct charset.
+        if response is None:
+            self.response = []
+        elif isinstance(response, (str, bytes, bytearray)):
+            self.set_data(response)
+        else:
+            self.response = response
+
+    def call_on_close(self, func: t.Callable[[], t.Any]) -> t.Callable[[], t.Any]:
+        """Adds a function to the internal list of functions that should
+        be called as part of closing down the response.  Since 0.7 this
+        function also returns the function that was passed so that this
+        can be used as a decorator.
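+
+        A short sketch (``fd`` is a hypothetical open file)::
+
+            @response.call_on_close
+            def _cleanup():
+                fd.close()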
+
+        .. versionadded:: 0.6
+        """
+        self._on_close.append(func)
+        return func
+
+    def __repr__(self) -> str:
+        if self.is_sequence:
+            body_info = f"{sum(map(len, self.iter_encoded()))} bytes"
+        else:
+            body_info = "streamed" if self.is_streamed else "likely-streamed"
+        return f"<{type(self).__name__} {body_info} [{self.status}]>"
+
+    @classmethod
+    def force_type(
+        cls, response: "Response", environ: t.Optional["WSGIEnvironment"] = None
+    ) -> "Response":
+        """Enforce that the WSGI response is a response object of the current
+        type.  Werkzeug will use the :class:`Response` internally in many
+        situations like the exceptions.  If you call :meth:`get_response` on an
+        exception you will get back a regular :class:`Response` object, even
+        if you are using a custom subclass.
+
+        This method can enforce a given response type, and it will also
+        convert arbitrary WSGI callables into response objects if an environ
+        is provided::
+
+            # convert a Werkzeug response object into an instance of the
+            # MyResponseClass subclass.
+            response = MyResponseClass.force_type(response)
+
+            # convert any WSGI application into a response object
+            response = MyResponseClass.force_type(response, environ)
+
+        This is especially useful if you want to post-process responses in
+        the main dispatcher and use functionality provided by your subclass.
+
+        Keep in mind that this will modify response objects in place if
+        possible!
+
+        :param response: a response object or wsgi application.
+        :param environ: a WSGI environment object.
+        :return: a response object.
+        """
+        if not isinstance(response, Response):
+            if environ is None:
+                raise TypeError(
+                    "cannot convert WSGI application into response"
+                    " objects without an environ"
+                )
+
+            from ..test import run_wsgi_app
+
+            response = Response(*run_wsgi_app(response, environ))
+
+        response.__class__ = cls
+        return response
+
+    @classmethod
+    def from_app(
+        cls, app: "WSGIApplication", environ: "WSGIEnvironment", buffered: bool = False
+    ) -> "Response":
+        """Create a new response object from an application output.  This
+        works best if you pass it an application that returns a generator all
+        the time.  Sometimes applications may use the `write()` callable
+        returned by the `start_response` function.  This tries to resolve such
+        edge cases automatically.  But if you don't get the expected output
+        you should set `buffered` to `True` which enforces buffering.
+
+        :param app: the WSGI application to execute.
+        :param environ: the WSGI environment to execute against.
+        :param buffered: set to `True` to enforce buffering.
+        :return: a response object.
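+
+        A short sketch (``app`` is any WSGI callable)::
+
+            from werkzeug.test import create_environ
+
+            resp = Response.from_app(app, create_environ('/'))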
+        """
+        from ..test import run_wsgi_app
+
+        return cls(*run_wsgi_app(app, environ, buffered))
+
+    @typing.overload
+    def get_data(self, as_text: "te.Literal[False]" = False) -> bytes:
+        ...
+
+    @typing.overload
+    def get_data(self, as_text: "te.Literal[True]") -> str:
+        ...
+
+    def get_data(self, as_text: bool = False) -> t.Union[bytes, str]:
+        """The string representation of the response body.  Whenever you call
+        this method the response iterable is encoded and flattened.  This
+        can lead to unwanted behavior if you stream big data.
+
+        This behavior can be disabled by setting
+        :attr:`implicit_sequence_conversion` to `False`.
+
+        If `as_text` is set to `True` the return value will be a decoded
+        string.
+
+        .. versionadded:: 0.9
+        """
+        self._ensure_sequence()
+        rv = b"".join(self.iter_encoded())
+
+        if as_text:
+            return rv.decode(self.charset)
+
+        return rv
+
+    def set_data(self, value: t.Union[bytes, str]) -> None:
+        """Sets a new string as response.  The value must be a string or
+        bytes. If a string is set it's encoded to the charset of the
+        response (utf-8 by default).
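+
+        A short sketch, via the :attr:`data` descriptor::
+
+            resp = Response()
+            resp.data = 'Hello'  # encoded to UTF-8, Content-Length set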
+
+        .. versionadded:: 0.9
+        """
+        # if a string is set, it's encoded directly so that we
+        # can set the content length
+        if isinstance(value, str):
+            value = value.encode(self.charset)
+        else:
+            value = bytes(value)
+        self.response = [value]
+        if self.automatically_set_content_length:
+            self.headers["Content-Length"] = str(len(value))
+
+    data = property(
+        get_data,
+        set_data,
+        doc="A descriptor that calls :meth:`get_data` and :meth:`set_data`.",
+    )
+
+    def calculate_content_length(self) -> t.Optional[int]:
+        """Returns the content length if available or `None` otherwise."""
+        try:
+            self._ensure_sequence()
+        except RuntimeError:
+            return None
+        return sum(len(x) for x in self.iter_encoded())
+
+    def _ensure_sequence(self, mutable: bool = False) -> None:
+        """This method can be called by methods that need a sequence.  If
+        `mutable` is true, it will also ensure that the response sequence
+        is a standard Python list.
+
+        .. versionadded:: 0.6
+        """
+        if self.is_sequence:
+            # if we need a mutable object, we ensure it's a list.
+            if mutable and not isinstance(self.response, list):
+                self.response = list(self.response)  # type: ignore
+            return
+        if self.direct_passthrough:
+            raise RuntimeError(
+                "Attempted implicit sequence conversion but the"
+                " response object is in direct passthrough mode."
+            )
+        if not self.implicit_sequence_conversion:
+            raise RuntimeError(
+                "The response object required the iterable to be a"
+                " sequence, but the implicit conversion was disabled."
+                " Call make_sequence() yourself."
+            )
+        self.make_sequence()
+
+    def make_sequence(self) -> None:
+        """Converts the response iterator in a list.  By default this happens
+        automatically if required.  If `implicit_sequence_conversion` is
+        disabled, this method is not automatically called and some properties
+        might raise exceptions.  This also encodes all the items.
+
+        .. versionadded:: 0.6
+        """
+        if not self.is_sequence:
+            # if we consume an iterable we have to ensure that the close
+            # method of the iterable is called if available when we tear
+            # down the response
+            close = getattr(self.response, "close", None)
+            self.response = list(self.iter_encoded())
+            if close is not None:
+                self.call_on_close(close)
+
+    def iter_encoded(self) -> t.Iterator[bytes]:
+        """Iter the response encoded with the encoding of the response.
+        If the response object is invoked as WSGI application the return
+        value of this method is used as application iterator unless
+        :attr:`direct_passthrough` was activated.
+        """
+        if __debug__:
+            _warn_if_string(self.response)
+        # Encode in a separate function so that self.response is fetched
+        # early.  This allows us to wrap the response with the return
+        # value from get_app_iter or iter_encoded.
+        return _iter_encoded(self.response, self.charset)
+
+    @property
+    def is_streamed(self) -> bool:
+        """If the response is streamed (the response is not an iterable with
+        length information) this property is `True`.  In this case streamed
+        means that there is no information about the number of iterations.
+        This is usually `True` if a generator is passed to the response object.
+
+        This is useful for checking before applying some sort of post
+        filtering that should not take place for streamed responses.
+        """
+        try:
+            len(self.response)  # type: ignore
+        except (TypeError, AttributeError):
+            return True
+        return False
+
+    @property
+    def is_sequence(self) -> bool:
+        """If the iterator is buffered, this property will be `True`.  A
+        response object will consider an iterator to be buffered if the
+        response attribute is a list or tuple.
+
+        .. versionadded:: 0.6
+        """
+        return isinstance(self.response, (tuple, list))
+
+    def close(self) -> None:
+        """Close the wrapped response if possible.  You can also use the object
+        in a with statement which will automatically close it.
+
+        .. versionadded:: 0.9
+           Can now be used in a with statement.
+        """
+        if hasattr(self.response, "close"):
+            self.response.close()  # type: ignore
+        for func in self._on_close:
+            func()
+
+    def __enter__(self) -> "Response":
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
+        self.close()
+
+    def freeze(self, no_etag: None = None) -> None:
+        """Make the response object ready to be pickled. Does the
+        following:
+
+        *   Buffer the response into a list, ignoring
+            :attr:`implicit_sequence_conversion` and
+            :attr:`direct_passthrough`.
+        *   Set the ``Content-Length`` header.
+        *   Generate an ``ETag`` header if one is not already set.
+
+        .. versionchanged:: 2.0
+            An ``ETag`` header is added, the ``no_etag`` parameter is
+            deprecated and will be removed in Werkzeug 2.1.
+
+        .. versionchanged:: 0.6
+            The ``Content-Length`` header is set.
+        """
+        # Always freeze the encoded response body, ignore
+        # implicit_sequence_conversion and direct_passthrough.
+        self.response = list(self.iter_encoded())
+        self.headers["Content-Length"] = str(sum(map(len, self.response)))
+
+        if no_etag is not None:
+            warnings.warn(
+                "The 'no_etag' parameter is deprecated and will be"
+                " removed in Werkzeug 2.1.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+        self.add_etag()
+
+    def get_wsgi_headers(self, environ: "WSGIEnvironment") -> Headers:
+        """This is automatically called right before the response is started
+        and returns headers modified for the given environment.  It returns a
+        copy of the headers from the response with some modifications applied
+        if necessary.
+
+        For example, the location header (if present) is joined with the root
+        URL of the environment.  Also, the Content-Length header is removed
+        here for certain status codes.
+
+        .. versionchanged:: 0.6
+           Previously that function was called `fix_headers` and modified
+           the response object in place.  Also since 0.6, IRIs in location
+           and content-location headers are handled properly.
+
+           Also starting with 0.6, Werkzeug will attempt to set the content
+           length if it is able to figure it out on its own.  This is the
+           case if all the strings in the response iterable are already
+           encoded and the iterable is buffered.
+
+        :param environ: the WSGI environment of the request.
+        :return: returns a new :class:`~werkzeug.datastructures.Headers`
+                 object.
+        """
+        headers = Headers(self.headers)
+        location: t.Optional[str] = None
+        content_location: t.Optional[str] = None
+        content_length: t.Optional[t.Union[str, int]] = None
+        status = self.status_code
+
+        # Iterate over the headers to find all values in one go.  Because
+        # get_wsgi_headers is used for each response, this gives us a tiny
+        # speedup.
+        for key, value in headers:
+            ikey = key.lower()
+            if ikey == "location":
+                location = value
+            elif ikey == "content-location":
+                content_location = value
+            elif ikey == "content-length":
+                content_length = value
+
+        # make sure the location header is an absolute URL
+        if location is not None:
+            old_location = location
+            if isinstance(location, str):
+                # Safe conversion is necessary here as we might redirect
+                # to a broken URI scheme (for instance itms-services).
+                location = iri_to_uri(location, safe_conversion=True)
+
+            if self.autocorrect_location_header:
+                current_url = get_current_url(environ, strip_querystring=True)
+                if isinstance(current_url, str):
+                    current_url = iri_to_uri(current_url)
+                location = url_join(current_url, location)
+            if location != old_location:
+                headers["Location"] = location
+
+        # make sure the content location is a URL
+        if content_location is not None and isinstance(content_location, str):
+            headers["Content-Location"] = iri_to_uri(content_location)
+
+        if 100 <= status < 200 or status == 204:
+            # Per section 3.3.2 of RFC 7230, "a server MUST NOT send a
+            # Content-Length header field in any response with a status
+            # code of 1xx (Informational) or 204 (No Content)."
+            headers.remove("Content-Length")
+        elif status == 304:
+            remove_entity_headers(headers)
+
+        # If we can determine the content length automatically, we
+        # should try to do that.  But only if this does not involve
+        # flattening the iterator or encoding strings in the
+        # response.  However, we should not do that for a 304
+        # response.
+        if (
+            self.automatically_set_content_length
+            and self.is_sequence
+            and content_length is None
+            and status not in (204, 304)
+            and not (100 <= status < 200)
+        ):
+            try:
+                content_length = sum(len(_to_bytes(x, "ascii")) for x in self.response)
+            except UnicodeError:
+                # Something other than bytes, can't safely figure out
+                # the length of the response.
+                pass
+            else:
+                headers["Content-Length"] = str(content_length)
+
+        return headers
+
+    def get_app_iter(self, environ: "WSGIEnvironment") -> t.Iterable[bytes]:
+        """Returns the application iterator for the given environ.  Depending
+        on the request method and the current status code the return value
+        might be an empty iterable rather than the response body.
+
+        If the request method is `HEAD` or the status code is in a range
+        where the HTTP specification requires an empty response, an empty
+        iterable is returned.
+
+        .. versionadded:: 0.6
+
+        :param environ: the WSGI environment of the request.
+        :return: a response iterable.
+        """
+        status = self.status_code
+        if (
+            environ["REQUEST_METHOD"] == "HEAD"
+            or 100 <= status < 200
+            or status in (204, 304)
+        ):
+            iterable: t.Iterable[bytes] = ()
+        elif self.direct_passthrough:
+            if __debug__:
+                _warn_if_string(self.response)
+            return self.response  # type: ignore
+        else:
+            iterable = self.iter_encoded()
+        return ClosingIterator(iterable, self.close)
+
+    def get_wsgi_response(
+        self, environ: "WSGIEnvironment"
+    ) -> t.Tuple[t.Iterable[bytes], str, t.List[t.Tuple[str, str]]]:
+        """Returns the final WSGI response as tuple.  The first item in
+        the tuple is the application iterator, the second the status and
+        the third the list of headers.  The response returned is created
+        specially for the given environment.  For example if the request
+        method in the WSGI environment is ``'HEAD'`` the response will
+        be empty and only the headers and status code will be present.
+
+        .. versionadded:: 0.6
+
+        :param environ: the WSGI environment of the request.
+        :return: an ``(app_iter, status, headers)`` tuple.
+        """
+        headers = self.get_wsgi_headers(environ)
+        app_iter = self.get_app_iter(environ)
+        return app_iter, self.status, headers.to_wsgi_list()
+
+    def __call__(
+        self, environ: "WSGIEnvironment", start_response: "StartResponse"
+    ) -> t.Iterable[bytes]:
+        """Process this response as WSGI application.
+
+        :param environ: the WSGI environment.
+        :param start_response: the response callable provided by the WSGI
+                               server.
+        :return: an application iterator
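+
+        A minimal sketch of the canonical use (``environ`` and
+        ``start_response`` are supplied by the WSGI server)::
+
+            def application(environ, start_response):
+                response = Response("Hello, World!")
+                return response(environ, start_response)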
+        """
+        app_iter, status, headers = self.get_wsgi_response(environ)
+        start_response(status, headers)
+        return app_iter
+
+    # JSON
+
+    #: A module or other object that has ``dumps`` and ``loads``
+    #: functions that match the API of the built-in :mod:`json` module.
+    json_module = json
+
+    @property
+    def json(self) -> t.Optional[t.Any]:
+        """The parsed JSON data if :attr:`mimetype` indicates JSON
+        (:mimetype:`application/json`, see :meth:`is_json`).
+
+        Calls :meth:`get_json` with default arguments.
+        """
+        return self.get_json()
+
+    def get_json(self, force: bool = False, silent: bool = False) -> t.Optional[t.Any]:
+        """Parse :attr:`data` as JSON. Useful during testing.
+
+        If the mimetype does not indicate JSON
+        (:mimetype:`application/json`, see :meth:`is_json`), this
+        returns ``None``.
+
+        Unlike :meth:`Request.get_json`, the result is not cached.
+
+        :param force: Ignore the mimetype and always try to parse JSON.
+        :param silent: Silence parsing errors and return ``None``
+            instead.
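+
+        A minimal sketch (the JSON body shown is illustrative)::
+
+            resp = Response('{"a": 1}', mimetype="application/json")
+            assert resp.get_json() == {"a": 1}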
+        """
+        if not (force or self.is_json):
+            return None
+
+        data = self.get_data()
+
+        try:
+            return self.json_module.loads(data)
+        except ValueError:
+            if not silent:
+                raise
+
+            return None
+
+    # Stream
+
+    @cached_property
+    def stream(self) -> "ResponseStream":
+        """The response iterable as write-only stream."""
+        return ResponseStream(self)
+
+    def _wrap_range_response(self, start: int, length: int) -> None:
+        """Wrap existing Response in case of Range Request context."""
+        if self.status_code == 206:
+            self.response = _RangeWrapper(self.response, start, length)  # type: ignore
+
+    def _is_range_request_processable(self, environ: "WSGIEnvironment") -> bool:
+        """Return ``True`` if `Range` header is present and if underlying
+        resource is considered unchanged when compared with `If-Range` header.
+        """
+        return (
+            "HTTP_IF_RANGE" not in environ
+            or not is_resource_modified(
+                environ,
+                self.headers.get("etag"),
+                None,
+                self.headers.get("last-modified"),
+                ignore_if_range=False,
+            )
+        ) and "HTTP_RANGE" in environ
+
+    def _process_range_request(
+        self,
+        environ: "WSGIEnvironment",
+        complete_length: t.Optional[int] = None,
+        accept_ranges: t.Optional[t.Union[bool, str]] = None,
+    ) -> bool:
+        """Handle Range Request related headers (RFC7233).  If `Accept-Ranges`
+        header is valid, and Range Request is processable, we set the headers
+        as described by the RFC, and wrap the underlying response in a
+        RangeWrapper.
+
+        Returns ``True`` if Range Request can be fulfilled, ``False`` otherwise.
+
+        :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable`
+                 if `Range` header could not be parsed or satisfied.
+
+        .. versionchanged:: 2.0
+            Returns ``False`` if the length is 0.
+        """
+        from ..exceptions import RequestedRangeNotSatisfiable
+
+        if (
+            accept_ranges is None
+            or complete_length is None
+            or complete_length == 0
+            or not self._is_range_request_processable(environ)
+        ):
+            return False
+
+        parsed_range = parse_range_header(environ.get("HTTP_RANGE"))
+
+        if parsed_range is None:
+            raise RequestedRangeNotSatisfiable(complete_length)
+
+        range_tuple = parsed_range.range_for_length(complete_length)
+        content_range_header = parsed_range.to_content_range_header(complete_length)
+
+        if range_tuple is None or content_range_header is None:
+            raise RequestedRangeNotSatisfiable(complete_length)
+
+        content_length = range_tuple[1] - range_tuple[0]
+        self.headers["Content-Length"] = content_length
+        self.headers["Accept-Ranges"] = accept_ranges
+        self.content_range = content_range_header  # type: ignore
+        self.status_code = 206
+        self._wrap_range_response(range_tuple[0], content_length)
+        return True
+
+    def make_conditional(
+        self,
+        request_or_environ: "WSGIEnvironment",
+        accept_ranges: t.Union[bool, str] = False,
+        complete_length: t.Optional[int] = None,
+    ) -> "Response":
+        """Make the response conditional to the request.  This method works
+        best if an etag was defined for the response already.  The `add_etag`
+        method can be used to do that.  If called without an etag, just the
+        date header is set.
+
+        This does nothing if the request method in the request or environ is
+        anything but GET or HEAD.
+
+        For optimal performance when handling range requests, it's recommended
+        that your response data object implements `seekable`, `seek` and `tell`
+        methods as described by :py:class:`io.IOBase`.  Objects returned by
+        :meth:`~werkzeug.wsgi.wrap_file` automatically implement those methods.
+
+        It does not remove the body of the response because that's something
+        the :meth:`__call__` function does for us automatically.
+
+        Returns self so that you can do ``return resp.make_conditional(req)``
+        but modifies the object in-place.
+
+        :param request_or_environ: a request object or WSGI environment to be
+                                   used to make the response conditional
+                                   against.
+        :param accept_ranges: This parameter dictates the value of
+                              `Accept-Ranges` header. If ``False`` (default),
+                              the header is not set. If ``True``, it will be set
+                              to ``"bytes"``. If ``None``, it will be set to
+                              ``"none"``. If it's a string, it will use this
+                              value.
+        :param complete_length: Will be used only in valid Range Requests.
+                                It will set `Content-Range` complete length
+                                value and compute `Content-Length` real value.
+                                This parameter is mandatory for successful
+                                Range Requests completion.
+        :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable`
+                 if `Range` header could not be parsed or satisfied.
+
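+        A minimal sketch of typical use inside a view (``request`` is an
+        assumed Werkzeug request object)::
+
+            resp = Response(b"payload")
+            resp.add_etag()
+            return resp.make_conditional(request)
+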
+        .. versionchanged:: 2.0
+            Range processing is skipped if length is 0 instead of
+            raising a 416 Range Not Satisfiable error.
+        """
+        environ = _get_environ(request_or_environ)
+        if environ["REQUEST_METHOD"] in ("GET", "HEAD"):
+            # If the date is not in the headers, add it now.  We will not,
+            # however, override an already existing header.  Unfortunately
+            # this header will be overridden by many WSGI servers including
+            # wsgiref.
+            if "date" not in self.headers:
+                self.headers["Date"] = http_date()
+            accept_ranges = _clean_accept_ranges(accept_ranges)
+            is206 = self._process_range_request(environ, complete_length, accept_ranges)
+            if not is206 and not is_resource_modified(
+                environ,
+                self.headers.get("etag"),
+                None,
+                self.headers.get("last-modified"),
+            ):
+                if parse_etags(environ.get("HTTP_IF_MATCH")):
+                    self.status_code = 412
+                else:
+                    self.status_code = 304
+            if (
+                self.automatically_set_content_length
+                and "content-length" not in self.headers
+            ):
+                length = self.calculate_content_length()
+                if length is not None:
+                    self.headers["Content-Length"] = length
+        return self
+
+    def add_etag(self, overwrite: bool = False, weak: bool = False) -> None:
+        """Add an etag for the current response if there is none yet.
+
+        .. versionchanged:: 2.0
+            SHA-1 is used to generate the value. MD5 may not be
+            available in some environments.
+        """
+        if overwrite or "etag" not in self.headers:
+            self.set_etag(generate_etag(self.get_data()), weak)
+
+
+class ResponseStream:
+    """A file descriptor like object used by the :class:`ResponseStreamMixin` to
+    represent the body of the stream.  It directly pushes into the response
+    iterable of the response object.
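+
+    A minimal sketch (``resp`` is assumed to be a :class:`Response`)::
+
+        resp.stream.write(b"partial ")
+        resp.stream.write(b"body")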
+    """
+
+    mode = "wb+"
+
+    def __init__(self, response: Response):
+        self.response = response
+        self.closed = False
+
+    def write(self, value: bytes) -> int:
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        self.response._ensure_sequence(mutable=True)
+        self.response.response.append(value)  # type: ignore
+        self.response.headers.pop("Content-Length", None)
+        return len(value)
+
+    def writelines(self, seq: t.Iterable[bytes]) -> None:
+        for item in seq:
+            self.write(item)
+
+    def close(self) -> None:
+        self.closed = True
+
+    def flush(self) -> None:
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+
+    def isatty(self) -> bool:
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        return False
+
+    def tell(self) -> int:
+        self.response._ensure_sequence()
+        return sum(map(len, self.response.response))
+
+    @property
+    def encoding(self) -> str:
+        return self.response.charset
+
+
+class ResponseStreamMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'ResponseStreamMixin' is deprecated and will be removed in"
+            " Werkzeug 2.1. 'Response' now includes the functionality"
+            " directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wrappers/user_agent.py b/venv/lib/python3.7/site-packages/werkzeug/wrappers/user_agent.py
new file mode 100644
index 00000000..184ffd02
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wrappers/user_agent.py
@@ -0,0 +1,14 @@
+import typing as t
+import warnings
+
+
+class UserAgentMixin:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        warnings.warn(
+            "'UserAgentMixin' is deprecated and will be removed in"
+            " Werkzeug 2.1. 'Request' now includes the functionality"
+            " directly.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)  # type: ignore
diff --git a/venv/lib/python3.7/site-packages/werkzeug/wsgi.py b/venv/lib/python3.7/site-packages/werkzeug/wsgi.py
new file mode 100644
index 00000000..9439a1e5
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/werkzeug/wsgi.py
@@ -0,0 +1,982 @@
+import io
+import re
+import typing as t
+from functools import partial
+from functools import update_wrapper
+from itertools import chain
+
+from ._internal import _make_encode_wrapper
+from ._internal import _to_bytes
+from ._internal import _to_str
+from .sansio import utils as _sansio_utils
+from .sansio.utils import host_is_trusted  # noqa: F401 # Imported as part of API
+from .urls import _URLTuple
+from .urls import uri_to_iri
+from .urls import url_join
+from .urls import url_parse
+from .urls import url_quote
+
+if t.TYPE_CHECKING:
+    from _typeshed.wsgi import WSGIApplication
+    from _typeshed.wsgi import WSGIEnvironment
+
+
+def responder(f: t.Callable[..., "WSGIApplication"]) -> "WSGIApplication":
+    """Marks a function as responder.  Decorate a function with it and it
+    will automatically call the return value as WSGI application.
+
+    Example::
+
+        @responder
+        def application(environ, start_response):
+            return Response('Hello World!')
+    """
+    return update_wrapper(lambda *a: f(*a)(*a[-2:]), f)
+
+
+def get_current_url(
+    environ: "WSGIEnvironment",
+    root_only: bool = False,
+    strip_querystring: bool = False,
+    host_only: bool = False,
+    trusted_hosts: t.Optional[t.Iterable[str]] = None,
+) -> str:
+    """Recreate the URL for a request from the parts in a WSGI
+    environment.
+
+    The URL is an IRI, not a URI, so it may contain Unicode characters.
+    Use :func:`~werkzeug.urls.iri_to_uri` to convert it to ASCII.
+
+    :param environ: The WSGI environment to get the URL parts from.
+    :param root_only: Only build the root path, don't include the
+        remaining path or query string.
+    :param strip_querystring: Don't include the query string.
+    :param host_only: Only build the scheme and host.
+    :param trusted_hosts: A list of trusted host names to validate the
+        host against.
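+
+    An illustrative sketch with a minimal, hand-built environ:
+
+    >>> env = {"wsgi.url_scheme": "http", "HTTP_HOST": "example.org",
+    ...        "SCRIPT_NAME": "/app", "PATH_INFO": "/hello"}
+    >>> get_current_url(env)
+    'http://example.org/app/hello'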
+    """
+    parts = {
+        "scheme": environ["wsgi.url_scheme"],
+        "host": get_host(environ, trusted_hosts),
+    }
+
+    if not host_only:
+        parts["root_path"] = environ.get("SCRIPT_NAME", "")
+
+        if not root_only:
+            parts["path"] = environ.get("PATH_INFO", "")
+
+            if not strip_querystring:
+                parts["query_string"] = environ.get("QUERY_STRING", "").encode("latin1")
+
+    return _sansio_utils.get_current_url(**parts)
+
+
+def _get_server(
+    environ: "WSGIEnvironment",
+) -> t.Optional[t.Tuple[str, t.Optional[int]]]:
+    name = environ.get("SERVER_NAME")
+
+    if name is None:
+        return None
+
+    try:
+        port: t.Optional[int] = int(environ.get("SERVER_PORT", None))
+    except (TypeError, ValueError):
+        # unix socket
+        port = None
+
+    return name, port
+
+
+def get_host(
+    environ: "WSGIEnvironment", trusted_hosts: t.Optional[t.Iterable[str]] = None
+) -> str:
+    """Return the host for the given WSGI environment.
+
+    The ``Host`` header is preferred, then ``SERVER_NAME`` if it's not
+    set. The returned host will only contain the port if it is different
+    than the standard port for the protocol.
+
+    Optionally, verify that the host is trusted using
+    :func:`host_is_trusted` and raise a
+    :exc:`~werkzeug.exceptions.SecurityError` if it is not.
+
+    :param environ: A WSGI environment dict.
+    :param trusted_hosts: A list of trusted host names.
+
+    :return: Host, with port if necessary.
+    :raise ~werkzeug.exceptions.SecurityError: If the host is not
+        trusted.
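+
+    For illustration (the scheme's default port is stripped):
+
+    >>> get_host({"wsgi.url_scheme": "http", "HTTP_HOST": "example.org:80"})
+    'example.org'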
+    """
+    return _sansio_utils.get_host(
+        environ["wsgi.url_scheme"],
+        environ.get("HTTP_HOST"),
+        _get_server(environ),
+        trusted_hosts,
+    )
+
+
+def get_content_length(environ: "WSGIEnvironment") -> t.Optional[int]:
+    """Returns the content length from the WSGI environment as
+    integer. If it's not available or chunked transfer encoding is used,
+    ``None`` is returned.
+
+    .. versionadded:: 0.9
+
+    :param environ: the WSGI environ to fetch the content length from.
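+
+    For illustration:
+
+    >>> get_content_length({"CONTENT_LENGTH": "42"})
+    42
+    >>> get_content_length({"HTTP_TRANSFER_ENCODING": "chunked"}) is None
+    True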
+    """
+    if environ.get("HTTP_TRANSFER_ENCODING", "") == "chunked":
+        return None
+
+    content_length = environ.get("CONTENT_LENGTH")
+    if content_length is not None:
+        try:
+            return max(0, int(content_length))
+        except (ValueError, TypeError):
+            pass
+    return None
+
+
+def get_input_stream(
+    environ: "WSGIEnvironment", safe_fallback: bool = True
+) -> t.BinaryIO:
+    """Returns the input stream from the WSGI environment and wraps it
+    in the most sensible way possible. The stream returned is not the
+    raw WSGI stream in most cases but one that is safe to read from
+    without taking into account the content length.
+
+    If content length is not set, the stream will be empty for safety reasons.
+    If the WSGI server supports chunked or infinite streams, it should set
+    the ``wsgi.input_terminated`` value in the WSGI environ to indicate that.
+
+    .. versionadded:: 0.9
+
+    :param environ: the WSGI environ to fetch the stream from.
+    :param safe_fallback: use an empty stream as a safe fallback when the
+        content length is not set. Disabling this allows infinite streams,
+        which can be a denial-of-service risk.
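+
+    A minimal sketch with a hand-built environ:
+
+    >>> import io
+    >>> env = {"wsgi.input": io.BytesIO(b"hello"), "CONTENT_LENGTH": "5"}
+    >>> get_input_stream(env).read()
+    b'hello'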
+    """
+    stream = t.cast(t.BinaryIO, environ["wsgi.input"])
+    content_length = get_content_length(environ)
+
+    # A wsgi extension that tells us if the input is terminated.  In
+    # that case we return the stream unchanged as we know we can safely
+    # read it until the end.
+    if environ.get("wsgi.input_terminated"):
+        return stream
+
+    # If the request doesn't specify a content length, returning the stream is
+    # potentially dangerous because it could be infinite, malicious or not. If
+    # safe_fallback is true, return an empty stream instead for safety.
+    if content_length is None:
+        return io.BytesIO() if safe_fallback else stream
+
+    # Otherwise limit the stream to the content length
+    return t.cast(t.BinaryIO, LimitedStream(stream, content_length))
+
+
+def get_query_string(environ: "WSGIEnvironment") -> str:
+    """Returns the ``QUERY_STRING`` from the WSGI environment. This also
+    takes care of the WSGI decoding dance. The string returned will be
+    restricted to ASCII characters.
+
+    :param environ: WSGI environment to get the query string from.
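+
+    For illustration:
+
+    >>> get_query_string({"QUERY_STRING": "a=1&b=2"})
+    'a=1&b=2'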
+
+    .. versionadded:: 0.9
+    """
+    qs = environ.get("QUERY_STRING", "").encode("latin1")
+    # QUERY_STRING really should be ascii safe but some browsers
+    # will send us some unicode stuff (I am looking at you IE).
+    # In that case we want to urllib quote it badly.
+    return url_quote(qs, safe=":&%=+$!*'(),")
+
+
+def get_path_info(
+    environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace"
+) -> str:
+    """Return the ``PATH_INFO`` from the WSGI environment and decode it
+    unless ``charset`` is ``None``.
+
+    :param environ: WSGI environment to get the path from.
+    :param charset: The charset for the path info, or ``None`` if no
+        decoding should be performed.
+    :param errors: The decoding error handling.
+
+    .. versionadded:: 0.9
+    """
+    path = environ.get("PATH_INFO", "").encode("latin1")
+    return _to_str(path, charset, errors, allow_none_charset=True)  # type: ignore
+
+
+def get_script_name(
+    environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace"
+) -> str:
+    """Return the ``SCRIPT_NAME`` from the WSGI environment and decode
+    it unless `charset` is set to ``None``.
+
+    :param environ: WSGI environment to get the path from.
+    :param charset: The charset for the path, or ``None`` if no decoding
+        should be performed.
+    :param errors: The decoding error handling.
+
+    .. versionadded:: 0.9
+    """
+    path = environ.get("SCRIPT_NAME", "").encode("latin1")
+    return _to_str(path, charset, errors, allow_none_charset=True)  # type: ignore
+
+
+def pop_path_info(
+    environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace"
+) -> t.Optional[str]:
+    """Removes and returns the next segment of `PATH_INFO`, pushing it onto
+    `SCRIPT_NAME`.  Returns `None` if there is nothing left on `PATH_INFO`.
+
+    If the `charset` is set to `None` bytes are returned.
+
+    If there are empty segments (``'/foo//bar'``) these are ignored but
+    properly pushed to the `SCRIPT_NAME`:
+
+    >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
+    >>> pop_path_info(env)
+    'a'
+    >>> env['SCRIPT_NAME']
+    '/foo/a'
+    >>> pop_path_info(env)
+    'b'
+    >>> env['SCRIPT_NAME']
+    '/foo/a/b'
+
+    .. versionadded:: 0.5
+
+    .. versionchanged:: 0.9
+       The path is now decoded and a charset and encoding
+       parameter can be provided.
+
+    :param environ: the WSGI environment that is modified.
+    :param charset: The ``encoding`` parameter passed to
+        :func:`bytes.decode`.
+    :param errors: The ``errors`` parameter passed to
+        :func:`bytes.decode`.
+    """
+    path = environ.get("PATH_INFO")
+    if not path:
+        return None
+
+    script_name = environ.get("SCRIPT_NAME", "")
+
+    # shift multiple leading slashes over
+    old_path = path
+    path = path.lstrip("/")
+    if path != old_path:
+        script_name += "/" * (len(old_path) - len(path))
+
+    if "/" not in path:
+        environ["PATH_INFO"] = ""
+        environ["SCRIPT_NAME"] = script_name + path
+        rv = path.encode("latin1")
+    else:
+        segment, path = path.split("/", 1)
+        environ["PATH_INFO"] = f"/{path}"
+        environ["SCRIPT_NAME"] = script_name + segment
+        rv = segment.encode("latin1")
+
+    return _to_str(rv, charset, errors, allow_none_charset=True)  # type: ignore
+
+
+def peek_path_info(
+    environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace"
+) -> t.Optional[str]:
+    """Returns the next segment on the `PATH_INFO` or `None` if there
+    is none.  Works like :func:`pop_path_info` without modifying the
+    environment:
+
+    >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
+    >>> peek_path_info(env)
+    'a'
+    >>> peek_path_info(env)
+    'a'
+
+    If the `charset` is set to `None` bytes are returned.
+
+    .. versionadded:: 0.5
+
+    .. versionchanged:: 0.9
+       The path is now decoded and a charset and encoding
+       parameter can be provided.
+
+    :param environ: the WSGI environment that is checked.
+    """
+    segments = environ.get("PATH_INFO", "").lstrip("/").split("/", 1)
+    if segments:
+        return _to_str(  # type: ignore
+            segments[0].encode("latin1"), charset, errors, allow_none_charset=True
+        )
+    return None
+
+
+def extract_path_info(
+    environ_or_baseurl: t.Union[str, "WSGIEnvironment"],
+    path_or_url: t.Union[str, _URLTuple],
+    charset: str = "utf-8",
+    errors: str = "werkzeug.url_quote",
+    collapse_http_schemes: bool = True,
+) -> t.Optional[str]:
+    """Extracts the path info from the given URL (or WSGI environment) and
+    path. The path info returned is a string. The URLs might also be IRIs.
+
+    If the path info could not be determined, `None` is returned.
+
+    Some examples:
+
+    >>> extract_path_info('http://example.com/app', '/app/hello')
+    '/hello'
+    >>> extract_path_info('http://example.com/app',
+    ...                   'https://example.com/app/hello')
+    '/hello'
+    >>> extract_path_info('http://example.com/app',
+    ...                   'https://example.com/app/hello',
+    ...                   collapse_http_schemes=False) is None
+    True
+
+    Instead of providing a base URL you can also pass a WSGI environment.
+
+    :param environ_or_baseurl: a WSGI environment dict, a base URL or
+                               base IRI.  This is the root of the
+                               application.
+    :param path_or_url: an absolute path from the server root, a
+                        relative path (in which case it's the path info)
+                        or a full URL.
+    :param charset: the charset for byte data in URLs
+    :param errors: the error handling on decode
+    :param collapse_http_schemes: if set to `False` the algorithm does
+                                  not assume that http and https on the
+                                  same server point to the same
+                                  resource.
+
+    .. versionchanged:: 0.15
+        The ``errors`` parameter defaults to leaving invalid bytes
+        quoted instead of replacing them.
+
+    .. versionadded:: 0.6
+    """
+
+    def _normalize_netloc(scheme: str, netloc: str) -> str:
+        parts = netloc.split("@", 1)[-1].split(":", 1)
+        port: t.Optional[str]
+
+        if len(parts) == 2:
+            netloc, port = parts
+            if (scheme == "http" and port == "80") or (
+                scheme == "https" and port == "443"
+            ):
+                port = None
+        else:
+            netloc = parts[0]
+            port = None
+
+        if port is not None:
+            netloc += f":{port}"
+
+        return netloc
+
+    # make sure whatever we are working on is an IRI and parse it
+    path = uri_to_iri(path_or_url, charset, errors)
+    if isinstance(environ_or_baseurl, dict):
+        environ_or_baseurl = get_current_url(environ_or_baseurl, root_only=True)
+    base_iri = uri_to_iri(environ_or_baseurl, charset, errors)
+    base_scheme, base_netloc, base_path = url_parse(base_iri)[:3]
+    cur_scheme, cur_netloc, cur_path = url_parse(url_join(base_iri, path))[:3]
+
+    # normalize the network location
+    base_netloc = _normalize_netloc(base_scheme, base_netloc)
+    cur_netloc = _normalize_netloc(cur_scheme, cur_netloc)
+
+    # is that IRI even on a known HTTP scheme?
+    if collapse_http_schemes:
+        for scheme in base_scheme, cur_scheme:
+            if scheme not in ("http", "https"):
+                return None
+    else:
+        if not (base_scheme in ("http", "https") and base_scheme == cur_scheme):
+            return None
+
+    # are the netlocs compatible?
+    if base_netloc != cur_netloc:
+        return None
+
+    # are we below the application path?
+    base_path = base_path.rstrip("/")
+    if not cur_path.startswith(base_path):
+        return None
+
+    return f"/{cur_path[len(base_path) :].lstrip('/')}"
+
+
+class ClosingIterator:
+    """The WSGI specification requires that all middlewares and gateways
+    respect the `close` callback of the iterable returned by the application.
+    Because it is useful to add another close action to a returned iterable,
+    and writing a custom iterable is a tedious task, this class can be used
+    for that::
+
+        return ClosingIterator(app(environ, start_response), [cleanup_session,
+                                                              cleanup_locals])
+
+    If there is just one close function it can be passed instead of the list.
+
+    A closing iterator is not needed if the application uses response objects
+    and finishes the processing if the response is started::
+
+        try:
+            return response(environ, start_response)
+        finally:
+            cleanup_session()
+            cleanup_locals()
+    """
+
+    def __init__(
+        self,
+        iterable: t.Iterable[bytes],
+        callbacks: t.Optional[
+            t.Union[t.Callable[[], None], t.Iterable[t.Callable[[], None]]]
+        ] = None,
+    ) -> None:
+        iterator = iter(iterable)
+        self._next = t.cast(t.Callable[[], bytes], partial(next, iterator))
+        if callbacks is None:
+            callbacks = []
+        elif callable(callbacks):
+            callbacks = [callbacks]
+        else:
+            callbacks = list(callbacks)
+        iterable_close = getattr(iterable, "close", None)
+        if iterable_close:
+            callbacks.insert(0, iterable_close)
+        self._callbacks = callbacks
+
+    def __iter__(self) -> "ClosingIterator":
+        return self
+
+    def __next__(self) -> bytes:
+        return self._next()
+
+    def close(self) -> None:
+        for callback in self._callbacks:
+            callback()
+
+
+def wrap_file(
+    environ: "WSGIEnvironment", file: t.BinaryIO, buffer_size: int = 8192
+) -> t.Iterable[bytes]:
+    """Wraps a file.  This uses the WSGI server's file wrapper if available
+    or otherwise the generic :class:`FileWrapper`.
+
+    .. versionadded:: 0.5
+
+    If the file wrapper from the WSGI server is used it's important to not
+    iterate over it from inside the application but to pass it through
+    unchanged.  If you want to pass out a file wrapper inside a response
+    object you have to set :attr:`Response.direct_passthrough` to `True`.
+
+    More information about file wrappers is available in :pep:`333`.
+
+    :param file: a :class:`file`-like object with a :meth:`~file.read` method.
+    :param buffer_size: number of bytes for one iteration.
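+
+    A minimal sketch (``environ`` comes from the WSGI server and the file
+    name is illustrative)::
+
+        data = wrap_file(environ, open("hello.txt", "rb"))
+        return Response(data, direct_passthrough=True)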
+    """
+    return environ.get("wsgi.file_wrapper", FileWrapper)(  # type: ignore
+        file, buffer_size
+    )
+
+
+class FileWrapper:
+    """This class can be used to convert a :class:`file`-like object into
+    an iterable.  It yields `buffer_size` blocks until the file is fully
+    read.
+
+    You should not use this class directly but rather use the
+    :func:`wrap_file` function that uses the WSGI server's file wrapper
+    support if it's available.
+
+    .. versionadded:: 0.5
+
+    If you're using this object together with a :class:`Response` you have
+    to use the `direct_passthrough` mode.
+
+    :param file: a :class:`file`-like object with a :meth:`~file.read` method.
+    :param buffer_size: number of bytes for one iteration.
+    """
+
+    def __init__(self, file: t.BinaryIO, buffer_size: int = 8192) -> None:
+        self.file = file
+        self.buffer_size = buffer_size
+
+    def close(self) -> None:
+        if hasattr(self.file, "close"):
+            self.file.close()
+
+    def seekable(self) -> bool:
+        if hasattr(self.file, "seekable"):
+            return self.file.seekable()
+        if hasattr(self.file, "seek"):
+            return True
+        return False
+
+    def seek(self, *args: t.Any) -> None:
+        if hasattr(self.file, "seek"):
+            self.file.seek(*args)
+
+    def tell(self) -> t.Optional[int]:
+        if hasattr(self.file, "tell"):
+            return self.file.tell()
+        return None
+
+    def __iter__(self) -> "FileWrapper":
+        return self
+
+    def __next__(self) -> bytes:
+        data = self.file.read(self.buffer_size)
+        if data:
+            return data
+        raise StopIteration()
+
+
+class _RangeWrapper:
+    # private for now, but should we make it public in the future?
+
+    """This class can be used to convert an iterable object into
+    an iterable that will only yield a piece of the underlying content.
+    It yields blocks until the underlying stream range is fully read.
+    The yielded blocks will have a size that can't exceed the original
+    iterator's block size, but they can be smaller.
+
+    If you're using this object together with a :class:`Response` you have
+    to use the `direct_passthrough` mode.
+
+    :param iterable: an iterable object with a :meth:`__next__` method.
+    :param start_byte: byte from which read will start.
+    :param byte_range: how many bytes to read.
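+
+    For illustration (slicing three bytes starting at offset 1):
+
+    >>> list(_RangeWrapper(iter([b"abcdef"]), start_byte=1, byte_range=3))
+    [b'bcd']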
+    """
+
+    def __init__(
+        self,
+        iterable: t.Union[t.Iterable[bytes], t.BinaryIO],
+        start_byte: int = 0,
+        byte_range: t.Optional[int] = None,
+    ):
+        self.iterable = iter(iterable)
+        self.byte_range = byte_range
+        self.start_byte = start_byte
+        self.end_byte = None
+
+        if byte_range is not None:
+            self.end_byte = start_byte + byte_range
+
+        self.read_length = 0
+        self.seekable = (
+            hasattr(iterable, "seekable") and iterable.seekable()  # type: ignore
+        )
+        self.end_reached = False
+
+    def __iter__(self) -> "_RangeWrapper":
+        return self
+
+    def _next_chunk(self) -> bytes:
+        try:
+            chunk = next(self.iterable)
+            self.read_length += len(chunk)
+            return chunk
+        except StopIteration:
+            self.end_reached = True
+            raise
+
+    def _first_iteration(self) -> t.Tuple[t.Optional[bytes], int]:
+        chunk = None
+        if self.seekable:
+            self.iterable.seek(self.start_byte)  # type: ignore
+            self.read_length = self.iterable.tell()  # type: ignore
+            contextual_read_length = self.read_length
+        else:
+            while self.read_length <= self.start_byte:
+                chunk = self._next_chunk()
+            if chunk is not None:
+                chunk = chunk[self.start_byte - self.read_length :]
+            contextual_read_length = self.start_byte
+        return chunk, contextual_read_length
+
+    def _next(self) -> bytes:
+        if self.end_reached:
+            raise StopIteration()
+        chunk = None
+        contextual_read_length = self.read_length
+        if self.read_length == 0:
+            chunk, contextual_read_length = self._first_iteration()
+        if chunk is None:
+            chunk = self._next_chunk()
+        if self.end_byte is not None and self.read_length >= self.end_byte:
+            self.end_reached = True
+            return chunk[: self.end_byte - contextual_read_length]
+        return chunk
+
+    def __next__(self) -> bytes:
+        chunk = self._next()
+        if chunk:
+            return chunk
+        self.end_reached = True
+        raise StopIteration()
+
+    def close(self) -> None:
+        if hasattr(self.iterable, "close"):
+            self.iterable.close()  # type: ignore
+
+
+def _make_chunk_iter(
+    stream: t.Union[t.Iterable[bytes], t.BinaryIO],
+    limit: t.Optional[int],
+    buffer_size: int,
+) -> t.Iterator[bytes]:
+    """Helper for the line and chunk iter functions."""
+    if isinstance(stream, (bytes, bytearray, str)):
+        raise TypeError(
+            "Passed a string or byte object instead of true iterator or stream."
+        )
+    if not hasattr(stream, "read"):
+        for item in stream:
+            if item:
+                yield item
+        return
+    stream = t.cast(t.BinaryIO, stream)
+    if not isinstance(stream, LimitedStream) and limit is not None:
+        stream = t.cast(t.BinaryIO, LimitedStream(stream, limit))
+    _read = stream.read
+    while True:
+        item = _read(buffer_size)
+        if not item:
+            break
+        yield item
+
+
+def make_line_iter(
+    stream: t.Union[t.Iterable[bytes], t.BinaryIO],
+    limit: t.Optional[int] = None,
+    buffer_size: int = 10 * 1024,
+    cap_at_buffer: bool = False,
+) -> t.Iterator[bytes]:
+    """Safely iterates line-based over an input stream.  If the input stream
+    is not a :class:`LimitedStream` the `limit` parameter is mandatory.
+
+    This uses the stream's :meth:`~file.read` method internally as opposed
+    to the :meth:`~file.readline` method that is unsafe and can only be used
+    in violation of the WSGI specification.  The same problem applies to the
+    `__iter__` function of the input stream which calls :meth:`~file.readline`
+    without arguments.
+
+    If you need line-by-line processing it's strongly recommended to iterate
+    over the input stream using this helper function.
+
+    .. versionchanged:: 0.8
+       This function now ensures that the limit was reached.
+
+    .. versionadded:: 0.9
+       added support for iterators as input stream.
+
+    .. versionadded:: 0.11.10
+       added support for the `cap_at_buffer` parameter.
+
+    :param stream: the stream or iterable to iterate over.
+    :param limit: the limit in bytes for the stream.  (Usually the
+                  content length.  Not necessary if the `stream`
+                  is a :class:`LimitedStream`.)
+    :param buffer_size: The optional buffer size.
+    :param cap_at_buffer: if this is set, chunks are split if they are longer
+                          than the buffer size.  Internally this is implemented
+                          so that the buffer size might still be exceeded by up
+                          to a factor of two.
+    """
+    _iter = _make_chunk_iter(stream, limit, buffer_size)
+
+    first_item = next(_iter, "")
+    if not first_item:
+        return
+
+    s = _make_encode_wrapper(first_item)
+    empty = t.cast(bytes, s(""))
+    cr = t.cast(bytes, s("\r"))
+    lf = t.cast(bytes, s("\n"))
+    crlf = t.cast(bytes, s("\r\n"))
+
+    _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter))
+
+    def _iter_basic_lines() -> t.Iterator[bytes]:
+        _join = empty.join
+        buffer: t.List[bytes] = []
+        while True:
+            new_data = next(_iter, "")
+            if not new_data:
+                break
+            new_buf: t.List[bytes] = []
+            buf_size = 0
+            for item in t.cast(
+                t.Iterator[bytes], chain(buffer, new_data.splitlines(True))
+            ):
+                new_buf.append(item)
+                buf_size += len(item)
+                if item and item[-1:] in crlf:
+                    yield _join(new_buf)
+                    new_buf = []
+                elif cap_at_buffer and buf_size >= buffer_size:
+                    rv = _join(new_buf)
+                    while len(rv) >= buffer_size:
+                        yield rv[:buffer_size]
+                        rv = rv[buffer_size:]
+                    new_buf = [rv]
+            buffer = new_buf
+        if buffer:
+            yield _join(buffer)
+
+    # This hackery is necessary to merge 'foo\r' and '\n' into one item
+    # of 'foo\r\n' if we were unlucky and we hit a chunk boundary.
+    previous = empty
+    for item in _iter_basic_lines():
+        if item == lf and previous[-1:] == cr:
+            previous += item
+            item = empty
+        if previous:
+            yield previous
+        previous = item
+    if previous:
+        yield previous
+
+
+def make_chunk_iter(
+    stream: t.Union[t.Iterable[bytes], t.BinaryIO],
+    separator: bytes,
+    limit: t.Optional[int] = None,
+    buffer_size: int = 10 * 1024,
+    cap_at_buffer: bool = False,
+) -> t.Iterator[bytes]:
+    """Works like :func:`make_line_iter` but accepts a separator
+    which divides chunks.  If you want newline based processing
+    you should use :func:`make_line_iter` instead as it
+    supports arbitrary newline markers.
+
+    .. versionadded:: 0.8
+
+    .. versionadded:: 0.9
+       added support for iterators as input stream.
+
+    .. versionadded:: 0.11.10
+       added support for the `cap_at_buffer` parameter.
+
+    :param stream: the stream or iterable to iterate over.
+    :param separator: the separator that divides chunks.
+    :param limit: the limit in bytes for the stream.  (Usually
+                  content length.  Not necessary if the `stream`
+                  is otherwise already limited).
+    :param buffer_size: The optional buffer size.
+    :param cap_at_buffer: if this is set, chunks are split if they are longer
+                          than the buffer size.  Internally this is implemented
+                          so that the buffer size might still be exceeded by up
+                          to a factor of two.
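+
+    For illustration:
+
+    >>> import io
+    >>> list(make_chunk_iter(io.BytesIO(b"a,b,c"), b",", limit=5))
+    [b'a', b'b', b'c']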
+    """
+    _iter = _make_chunk_iter(stream, limit, buffer_size)
+
+    first_item = next(_iter, b"")
+    if not first_item:
+        return
+
+    _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter))
+    if isinstance(first_item, str):
+        separator = _to_str(separator)
+        _split = re.compile(f"({re.escape(separator)})").split
+        _join = "".join
+    else:
+        separator = _to_bytes(separator)
+        _split = re.compile(b"(" + re.escape(separator) + b")").split
+        _join = b"".join
+
+    buffer: t.List[bytes] = []
+    while True:
+        new_data = next(_iter, b"")
+        if not new_data:
+            break
+        chunks = _split(new_data)
+        new_buf: t.List[bytes] = []
+        buf_size = 0
+        for item in chain(buffer, chunks):
+            if item == separator:
+                yield _join(new_buf)
+                new_buf = []
+                buf_size = 0
+            else:
+                buf_size += len(item)
+                new_buf.append(item)
+
+                if cap_at_buffer and buf_size >= buffer_size:
+                    rv = _join(new_buf)
+                    while len(rv) >= buffer_size:
+                        yield rv[:buffer_size]
+                        rv = rv[buffer_size:]
+                    new_buf = [rv]
+                    buf_size = len(rv)
+
+        buffer = new_buf
+    if buffer:
+        yield _join(buffer)
+
+
+class LimitedStream(io.IOBase):
+    """Wraps a stream so that it doesn't read more than n bytes.  If the
+    stream is exhausted and the caller tries to get more bytes from it
+    :meth:`on_exhausted` is called, which by default returns an empty
+    bytestring.  The return value of that function is forwarded
+    to the reader function.  So if it returns an empty bytestring,
+    :meth:`read` will return an empty bytestring as well.
+
+    The limit however must never be higher than what the stream can
+    output.  Otherwise :meth:`readlines` will try to read past the
+    limit.
+
+    .. admonition:: Note on WSGI compliance
+
+       Calls to :meth:`readline` and :meth:`readlines` are not
+       WSGI compliant because they pass a size argument to the
+       readline methods.  Unfortunately the WSGI PEP is not safely
+       implementable without a size argument to :meth:`readline`
+       because there is no EOF marker in the stream.  As a result
+       of that the use of :meth:`readline` is discouraged.
+
+       For the same reason iterating over the :class:`LimitedStream`
+       is not portable.  It internally calls :meth:`readline`.
+
+       We strongly suggest using :meth:`read` only or using the
+       :func:`make_line_iter` which safely iterates line-based
+       over a WSGI input stream.
+
+    :param stream: the stream to wrap.
+    :param limit: the limit for the stream, must not be longer than
+                  what the stream can provide if the stream does not
+                  end with `EOF` (like `wsgi.input`)
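+
+    For illustration:
+
+    >>> import io
+    >>> stream = LimitedStream(io.BytesIO(b"hello world"), 5)
+    >>> stream.read()
+    b'hello'
+    >>> stream.read()
+    b''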
+    """
+
+    def __init__(self, stream: t.BinaryIO, limit: int) -> None:
+        self._read = stream.read
+        self._readline = stream.readline
+        self._pos = 0
+        self.limit = limit
+
+    def __iter__(self) -> "LimitedStream":
+        return self
+
+    @property
+    def is_exhausted(self) -> bool:
+        """If the stream is exhausted this attribute is `True`."""
+        return self._pos >= self.limit
+
+    def on_exhausted(self) -> bytes:
+        """This is called when the stream tries to read past the limit.
+        The return value of this function is returned from the reading
+        function.
+        """
+        # Read zero bytes from the stream so that we get the
+        # correct end of stream marker.
+        return self._read(0)
+
+    def on_disconnect(self) -> bytes:
+        """What should happen if a disconnect is detected?  The return
+        value of this function is returned from read functions in case
+        the client went away.  By default a
+        :exc:`~werkzeug.exceptions.ClientDisconnected` exception is raised.
+        """
+        from .exceptions import ClientDisconnected
+
+        raise ClientDisconnected()
+
+    def exhaust(self, chunk_size: int = 1024 * 64) -> None:
+        """Exhaust the stream.  This consumes all the data left until the
+        limit is reached.
+
+        :param chunk_size: the size for a chunk.  It will read the chunk
+                           until the stream is exhausted and throw away
+                           the results.
+        """
+        to_read = self.limit - self._pos
+        chunk = chunk_size
+        while to_read > 0:
+            chunk = min(to_read, chunk)
+            self.read(chunk)
+            to_read -= chunk
+
+    def read(self, size: t.Optional[int] = None) -> bytes:
+        """Read `size` bytes or if size is not provided everything is read.
+
+        :param size: the number of bytes read.
+        """
+        if self._pos >= self.limit:
+            return self.on_exhausted()
+        if size is None or size == -1:  # -1 is for consistency with file objects
+            size = self.limit
+        to_read = min(self.limit - self._pos, size)
+        try:
+            read = self._read(to_read)
+        except (OSError, ValueError):
+            return self.on_disconnect()
+        if to_read and len(read) != to_read:
+            return self.on_disconnect()
+        self._pos += len(read)
+        return read
+
+    def readline(self, size: t.Optional[int] = None) -> bytes:
+        """Reads one line from the stream."""
+        if self._pos >= self.limit:
+            return self.on_exhausted()
+        if size is None:
+            size = self.limit - self._pos
+        else:
+            size = min(size, self.limit - self._pos)
+        try:
+            line = self._readline(size)
+        except (ValueError, OSError):
+            return self.on_disconnect()
+        if size and not line:
+            return self.on_disconnect()
+        self._pos += len(line)
+        return line
+
+    def readlines(self, size: t.Optional[int] = None) -> t.List[bytes]:
+        """Reads a file into a list of strings.  It calls :meth:`readline`
+        until the file is read to the end.  It does support the optional
+        `size` argument if the underlying stream supports it for
+        `readline`.
+        """
+        last_pos = self._pos
+        result = []
+        if size is not None:
+            end = min(self.limit, last_pos + size)
+        else:
+            end = self.limit
+        while True:
+            if size is not None:
+                size -= last_pos - self._pos
+            if self._pos >= end:
+                break
+            result.append(self.readline(size))
+            if size is not None:
+                last_pos = self._pos
+        return result
+
+    def tell(self) -> int:
+        """Returns the position of the stream.
+
+        .. versionadded:: 0.9
+        """
+        return self._pos
+
+    def __next__(self) -> bytes:
+        line = self.readline()
+        if not line:
+            raise StopIteration()
+        return line
+
+    def readable(self) -> bool:
+        return True
diff --git a/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/LICENSE.txt b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/LICENSE.txt
new file mode 100644
index 00000000..c3441e6c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/LICENSE.txt
@@ -0,0 +1,22 @@
+"wheel" copyright (c) 2012-2014 Daniel Holth <dholth@fastmail.fm> and
+contributors.
+
+The MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/METADATA b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/METADATA
new file mode 100644
index 00000000..10047611
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/METADATA
@@ -0,0 +1,68 @@
+Metadata-Version: 2.1
+Name: wheel
+Version: 0.36.2
+Summary: A built-package format for Python
+Home-page: https://github.com/pypa/wheel
+Author: Daniel Holth
+Author-email: dholth@fastmail.fm
+Maintainer: Alex Grönholm
+Maintainer-email: alex.gronholm@nextday.fi
+License: MIT
+Project-URL: Documentation, https://wheel.readthedocs.io/
+Project-URL: Changelog, https://wheel.readthedocs.io/en/stable/news.html
+Project-URL: Issue Tracker, https://github.com/pypa/wheel/issues
+Keywords: wheel,packaging
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
+Provides-Extra: test
+Requires-Dist: pytest (>=3.0.0) ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+
+wheel
+=====
+
+This library is the reference implementation of the Python wheel packaging
+standard, as defined in `PEP 427`_.
+
+It has two different roles:
+
+#. A setuptools_ extension for building wheels that provides the
+   ``bdist_wheel`` setuptools command
+#. A command line tool for working with wheel files
+
+It should be noted that wheel is **not** intended to be used as a library, and
+as such there is no stable, public API.
+
+.. _PEP 427: https://www.python.org/dev/peps/pep-0427/
+.. _setuptools: https://pypi.org/project/setuptools/
+
+Documentation
+-------------
+
+The documentation_ can be found on Read The Docs.
+
+.. _documentation: https://wheel.readthedocs.io/
+
+Code of Conduct
+---------------
+
+Everyone interacting in the wheel project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
+
+
+
diff --git a/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/RECORD b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/RECORD
new file mode 100644
index 00000000..06340d77
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/RECORD
@@ -0,0 +1,40 @@
+../../../bin/wheel,sha256=LvqJZYYmFZbcumengxyFgKhAHxIZSi1n70u9VqXGJUE,249
+wheel-0.36.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+wheel-0.36.2.dist-info/LICENSE.txt,sha256=zKniDGrx_Pv2lAjzd3aShsvuvN7TNhAMm0o_NfvmNeQ,1125
+wheel-0.36.2.dist-info/METADATA,sha256=w96pAXMHZWzFy-dwqGZqXbxu5Hup3cb3OHujhCuWfDs,2277
+wheel-0.36.2.dist-info/RECORD,,
+wheel-0.36.2.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
+wheel-0.36.2.dist-info/entry_points.txt,sha256=N8HbYFST3yrNQYeB2wXWBEPUhFsEtKNRPaCFGJPyqyc,108
+wheel-0.36.2.dist-info/top_level.txt,sha256=HxSBIbgEstMPe4eFawhA66Mq-QYHMopXVoAncfjb_1c,6
+wheel/__init__.py,sha256=gEack6q3vgbmrgen7cO3fCOv6vFF4hh2KYyj3fKk54I,23
+wheel/__main__.py,sha256=lF-YLO4hdQmoWuh4eWZd8YL1U95RSdm76sNLBXa0vjE,417
+wheel/__pycache__/__init__.cpython-37.pyc,,
+wheel/__pycache__/__main__.cpython-37.pyc,,
+wheel/__pycache__/bdist_wheel.cpython-37.pyc,,
+wheel/__pycache__/macosx_libfile.cpython-37.pyc,,
+wheel/__pycache__/metadata.cpython-37.pyc,,
+wheel/__pycache__/pkginfo.cpython-37.pyc,,
+wheel/__pycache__/util.cpython-37.pyc,,
+wheel/__pycache__/wheelfile.cpython-37.pyc,,
+wheel/bdist_wheel.py,sha256=2vfv3g_b8BvZ5Do9bpLEBdu9dQEcvoMQ1flXpKYFJDU,19075
+wheel/cli/__init__.py,sha256=GWSoGUpRabTf8bk3FsNTPrc5Fsr8YOv2dX55iY2W7eY,2572
+wheel/cli/__pycache__/__init__.cpython-37.pyc,,
+wheel/cli/__pycache__/convert.cpython-37.pyc,,
+wheel/cli/__pycache__/pack.cpython-37.pyc,,
+wheel/cli/__pycache__/unpack.cpython-37.pyc,,
+wheel/cli/convert.py,sha256=7F4vj23A2OghDDWn9gX2V-_TeXMza1a5nIejmFGEUJM,9498
+wheel/cli/pack.py,sha256=S-J1iIy1GPDTTDdn-_SwxGa7N729h4iZNI11EDFCqfA,3208
+wheel/cli/unpack.py,sha256=0VWzT7U_xyenTPwEVavxqvdee93GPvAFHnR3Uu91aRc,673
+wheel/macosx_libfile.py,sha256=7vEcGll23K3fiLjttH0intjxR0NIK8O6vimjRZ7BCoc,15931
+wheel/metadata.py,sha256=b3kPhZn2w2D9wengltX5nGIZQ3ERUOQ5U-K5vHKPdeg,4344
+wheel/pkginfo.py,sha256=GR76kupQzn1x9sKDaXuE6B6FsZ4OkfRtG7pndlXPvQ4,1257
+wheel/util.py,sha256=mnNZkJCi9DHLI_q4lTudoD0mW97h_AoAWl7prNPLXJc,938
+wheel/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel/vendored/__pycache__/__init__.cpython-37.pyc,,
+wheel/vendored/packaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel/vendored/packaging/__pycache__/__init__.cpython-37.pyc,,
+wheel/vendored/packaging/__pycache__/_typing.cpython-37.pyc,,
+wheel/vendored/packaging/__pycache__/tags.cpython-37.pyc,,
+wheel/vendored/packaging/_typing.py,sha256=x59EhQ57TMT-kTRyLZV25HZvYGGwbucTo6iKh_O0tMw,1812
+wheel/vendored/packaging/tags.py,sha256=y8hgFqfV0Krio-HegPB3D0p_AeiYcE8XwAlEdmErDe0,28937
+wheel/wheelfile.py,sha256=7KgOK1znro-D8AelgNEE4jg6fDYXY_Bu6crdqLb2EQQ,7336
diff --git a/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/WHEEL b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/WHEEL
new file mode 100644
index 00000000..01b8fc7d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/entry_points.txt b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/entry_points.txt
new file mode 100644
index 00000000..b27acadd
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/entry_points.txt
@@ -0,0 +1,6 @@
+[console_scripts]
+wheel = wheel.cli:main
+
+[distutils.commands]
+bdist_wheel = wheel.bdist_wheel:bdist_wheel
+
diff --git a/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/top_level.txt
new file mode 100644
index 00000000..2309722a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel-0.36.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+wheel
diff --git a/venv/lib/python3.7/site-packages/wheel/__init__.py b/venv/lib/python3.7/site-packages/wheel/__init__.py
new file mode 100644
index 00000000..349a8f7b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/__init__.py
@@ -0,0 +1 @@
+__version__ = '0.36.2'
diff --git a/venv/lib/python3.7/site-packages/wheel/__main__.py b/venv/lib/python3.7/site-packages/wheel/__main__.py
new file mode 100644
index 00000000..b3773a20
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/__main__.py
@@ -0,0 +1,19 @@
+"""
+Wheel command line tool (enables the `python -m wheel` syntax)
+"""
+
+import sys
+
+
+def main():  # needed for console script
+    if __package__ == '':
+        # To be able to run 'python wheel-0.9.whl/wheel':
+        import os.path
+        path = os.path.dirname(os.path.dirname(__file__))
+        sys.path[0:0] = [path]
+    import wheel.cli
+    sys.exit(wheel.cli.main())
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/venv/lib/python3.7/site-packages/wheel/bdist_wheel.py b/venv/lib/python3.7/site-packages/wheel/bdist_wheel.py
new file mode 100644
index 00000000..80e43d0a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/bdist_wheel.py
@@ -0,0 +1,492 @@
+"""
+Create a wheel (.whl) distribution.
+
+A wheel is a built archive format.
+"""
+
+import distutils
+import os
+import shutil
+import stat
+import sys
+import re
+import warnings
+from collections import OrderedDict
+from distutils.core import Command
+from distutils import log as logger
+from io import BytesIO
+from glob import iglob
+from shutil import rmtree
+from sysconfig import get_config_var
+from zipfile import ZIP_DEFLATED, ZIP_STORED
+
+import pkg_resources
+
+from .pkginfo import write_pkg_info
+from .macosx_libfile import calculate_macosx_platform_tag
+from .metadata import pkginfo_to_metadata
+from .vendored.packaging import tags
+from .wheelfile import WheelFile
+from . import __version__ as wheel_version
+
+if sys.version_info < (3,):
+    from email.generator import Generator as BytesGenerator
+else:
+    from email.generator import BytesGenerator
+
+safe_name = pkg_resources.safe_name
+safe_version = pkg_resources.safe_version
+
+PY_LIMITED_API_PATTERN = r'cp3\d'
+
+
+def python_tag():
+    return 'py{}'.format(sys.version_info[0])
+
+
+def get_platform(archive_root):
+    """Return our platform name 'win32', 'linux_x86_64'"""
+    # XXX remove distutils dependency
+    result = distutils.util.get_platform()
+    if result.startswith("macosx") and archive_root is not None:
+        result = calculate_macosx_platform_tag(archive_root, result)
+    if result == "linux_x86_64" and sys.maxsize == 2147483647:
+        # pip pull request #3497
+        result = "linux_i686"
+    return result
+
+
+def get_flag(var, fallback, expected=True, warn=True):
+    """Use a fallback value for determining SOABI flags if the needed config
+    var is unset or unavailable."""
+    val = get_config_var(var)
+    if val is None:
+        if warn:
+            warnings.warn("Config variable '{0}' is unset, Python ABI tag may "
+                          "be incorrect".format(var), RuntimeWarning, 2)
+        return fallback
+    return val == expected
+
+
+def get_abi_tag():
+    """Return the ABI tag based on SOABI (if available) or emulate SOABI
+    (CPython 2, PyPy)."""
+    soabi = get_config_var('SOABI')
+    impl = tags.interpreter_name()
+    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
+        d = ''
+        m = ''
+        u = ''
+        if get_flag('Py_DEBUG',
+                    hasattr(sys, 'gettotalrefcount'),
+                    warn=(impl == 'cp')):
+            d = 'd'
+        if get_flag('WITH_PYMALLOC',
+                    impl == 'cp',
+                    warn=(impl == 'cp' and
+                          sys.version_info < (3, 8))) \
+                and sys.version_info < (3, 8):
+            m = 'm'
+        if get_flag('Py_UNICODE_SIZE',
+                    sys.maxunicode == 0x10ffff,
+                    expected=4,
+                    warn=(impl == 'cp' and
+                          sys.version_info < (3, 3))) \
+                and sys.version_info < (3, 3):
+            u = 'u'
+        abi = '%s%s%s%s%s' % (impl, tags.interpreter_version(), d, m, u)
+    elif soabi and soabi.startswith('cpython-'):
+        abi = 'cp' + soabi.split('-')[1]
+    elif soabi and soabi.startswith('pypy-'):
+        # we want something like pypy36-pp73
+        abi = '-'.join(soabi.split('-')[:2])
+        abi = abi.replace('.', '_').replace('-', '_')
+    elif soabi:
+        abi = soabi.replace('.', '_').replace('-', '_')
+    else:
+        abi = None
+    return abi
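+
+
+# Illustrative examples (an assumption about typical builds, not taken from
+# this file): a standard CPython 3.7 with pymalloc yields the ABI tag 'cp37m',
+# while CPython 3.8+ drops the 'm' flag and yields e.g. 'cp38'.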
+
+
+def safer_name(name):
+    return safe_name(name).replace('-', '_')
+
+
+def safer_version(version):
+    return safe_version(version).replace('-', '_')
+
+
+def remove_readonly(func, path, excinfo):
+    print(str(excinfo[1]))
+    os.chmod(path, stat.S_IWRITE)
+    func(path)
+
+
+class bdist_wheel(Command):
+
+    description = 'create a wheel distribution'
+
+    supported_compressions = OrderedDict([
+        ('stored', ZIP_STORED),
+        ('deflated', ZIP_DEFLATED)
+    ])
+
+    user_options = [('bdist-dir=', 'b',
+                     "temporary directory for creating the distribution"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform(None)),
+                    ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('relative', None,
+                     "build the archive using relative paths "
+                     "(default: false)"),
+                    ('owner=', 'u',
+                     "Owner name used when creating a tar file"
+                     " [default: current user]"),
+                    ('group=', 'g',
+                     "Group name used when creating a tar file"
+                     " [default: current group]"),
+                    ('universal', None,
+                     "make a universal wheel"
+                     " (default: false)"),
+                    ('compression=', None,
+                     "zipfile compression (one of: {})"
+                     " (default: 'deflated')"
+                     .format(', '.join(supported_compressions))),
+                    ('python-tag=', None,
+                     "Python implementation compatibility tag"
+                     " (default: '%s')" % (python_tag())),
+                    ('build-number=', None,
+                     "Build number for this particular version. "
+                     "As specified in PEP-0427, this must start with a digit. "
+                     "[default: None]"),
+                    ('py-limited-api=', None,
+                     "Python tag (cp32|cp33|cpNN) for abi3 wheel tag"
+                     " (default: false)"),
+                    ]
+
+    boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal']
+
+    def initialize_options(self):
+        self.bdist_dir = None
+        self.data_dir = None
+        self.plat_name = None
+        self.plat_tag = None
+        self.format = 'zip'
+        self.keep_temp = False
+        self.dist_dir = None
+        self.egginfo_dir = None
+        self.root_is_pure = None
+        self.skip_build = None
+        self.relative = False
+        self.owner = None
+        self.group = None
+        self.universal = False
+        self.compression = 'deflated'
+        self.python_tag = python_tag()
+        self.build_number = None
+        self.py_limited_api = False
+        self.plat_name_supplied = False
+
+    def finalize_options(self):
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'wheel')
+
+        self.data_dir = self.wheel_dist_name + '.data'
+        self.plat_name_supplied = self.plat_name is not None
+
+        try:
+            self.compression = self.supported_compressions[self.compression]
+        except KeyError:
+            raise ValueError('Unsupported compression: {}'.format(self.compression))
+
+        need_options = ('dist_dir', 'plat_name', 'skip_build')
+
+        self.set_undefined_options('bdist',
+                                   *zip(need_options, need_options))
+
+        self.root_is_pure = not (self.distribution.has_ext_modules()
+                                 or self.distribution.has_c_libraries())
+
+        if self.py_limited_api and not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api):
+            raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)
+
+        # Support legacy [wheel] section for setting universal
+        wheel = self.distribution.get_option_dict('wheel')
+        if 'universal' in wheel:
+            # please don't define this in your global configs
+            logger.warn('The [wheel] section is deprecated. Use [bdist_wheel] instead.')
+            val = wheel['universal'][1].strip()
+            if val.lower() in ('1', 'true', 'yes'):
+                self.universal = True
+
+        if self.build_number is not None and not self.build_number[:1].isdigit():
+            raise ValueError("Build tag (build-number) must start with a digit.")
+
+    @property
+    def wheel_dist_name(self):
+        """Return distribution full name with - replaced with _"""
+        components = (safer_name(self.distribution.get_name()),
+                      safer_version(self.distribution.get_version()))
+        if self.build_number:
+            components += (self.build_number,)
+        return '-'.join(components)
+
+    def get_tag(self):
+        # bdist sets self.plat_name if unset, we should only use it for purepy
+        # wheels if the user supplied it.
+        if self.plat_name_supplied:
+            plat_name = self.plat_name
+        elif self.root_is_pure:
+            plat_name = 'any'
+        else:
+            # macosx embeds the system version in the platform name, so it needs special handling
+            if self.plat_name and not self.plat_name.startswith("macosx"):
+                plat_name = self.plat_name
+            else:
+                # on macosx always limit the platform name to comply with any
+                # c-extension modules in bdist_dir, since the user can specify
+                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake
+
+                # on other platforms, and on macosx if there are no c-extension
+                # modules, use the default platform name.
+                plat_name = get_platform(self.bdist_dir)
+
+            if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647:
+                plat_name = 'linux_i686'
+
+        plat_name = plat_name.lower().replace('-', '_').replace('.', '_')
+
+        if self.root_is_pure:
+            if self.universal:
+                impl = 'py2.py3'
+            else:
+                impl = self.python_tag
+            tag = (impl, 'none', plat_name)
+        else:
+            impl_name = tags.interpreter_name()
+            impl_ver = tags.interpreter_version()
+            impl = impl_name + impl_ver
+            # We don't work on CPython 3.1, 3.0.
+            if self.py_limited_api and (impl_name + impl_ver).startswith('cp3'):
+                impl = self.py_limited_api
+                abi_tag = 'abi3'
+            else:
+                abi_tag = str(get_abi_tag()).lower()
+            tag = (impl, abi_tag, plat_name)
+            # issue gh-374: allow overriding plat_name
+            supported_tags = [(t.interpreter, t.abi, plat_name)
+                              for t in tags.sys_tags()]
+            assert tag in supported_tags, "would build wheel with unsupported tag {}".format(tag)
+        return tag
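+
+    # For illustration (typical values, assumed rather than taken from this
+    # file): a CPython 3.7 C-extension wheel on 64-bit macOS commonly gets
+    # ('cp37', 'cp37m', 'macosx_10_9_x86_64'), while a pure universal wheel
+    # gets ('py2.py3', 'none', 'any').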
+
+    def run(self):
+        build_scripts = self.reinitialize_command('build_scripts')
+        build_scripts.executable = 'python'
+        build_scripts.force = True
+
+        build_ext = self.reinitialize_command('build_ext')
+        build_ext.inplace = False
+
+        if not self.skip_build:
+            self.run_command('build')
+
+        install = self.reinitialize_command('install',
+                                            reinit_subcommands=True)
+        install.root = self.bdist_dir
+        install.compile = False
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+
+        # A wheel without setuptools scripts is more cross-platform.
+        # Use the (undocumented) `no_ep` option to setuptools'
+        # install_scripts command to avoid creating entry point scripts.
+        install_scripts = self.reinitialize_command('install_scripts')
+        install_scripts.no_ep = True
+
+        # Use a custom scheme for the archive, because we have to decide
+        # at installation time which scheme to use.
+        for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'):
+            setattr(install,
+                    'install_' + key,
+                    os.path.join(self.data_dir, key))
+
+        basedir_observed = ''
+
+        if os.name == 'nt':
+            # win32 barfs if any of these are ''; could be '.'?
+            # (distutils.command.install:change_roots bug)
+            basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..'))
+            self.install_libbase = self.install_lib = basedir_observed
+
+        setattr(install,
+                'install_purelib' if self.root_is_pure else 'install_platlib',
+                basedir_observed)
+
+        logger.info("installing to %s", self.bdist_dir)
+
+        self.run_command('install')
+
+        impl_tag, abi_tag, plat_tag = self.get_tag()
+        archive_basename = "{}-{}-{}-{}".format(self.wheel_dist_name, impl_tag, abi_tag, plat_tag)
+        if not self.relative:
+            archive_root = self.bdist_dir
+        else:
+            archive_root = os.path.join(
+                self.bdist_dir,
+                self._ensure_relative(install.install_base))
+
+        self.set_undefined_options('install_egg_info', ('target', 'egginfo_dir'))
+        distinfo_dirname = '{}-{}.dist-info'.format(
+            safer_name(self.distribution.get_name()),
+            safer_version(self.distribution.get_version()))
+        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
+        self.egg2dist(self.egginfo_dir, distinfo_dir)
+
+        self.write_wheelfile(distinfo_dir)
+
+        # Make the archive
+        if not os.path.exists(self.dist_dir):
+            os.makedirs(self.dist_dir)
+
+        wheel_path = os.path.join(self.dist_dir, archive_basename + '.whl')
+        with WheelFile(wheel_path, 'w', self.compression) as wf:
+            wf.write_files(archive_root)
+
+        # Add to 'Distribution.dist_files' so that the "upload" command works
+        getattr(self.distribution, 'dist_files', []).append(
+            ('bdist_wheel',
+             '{}.{}'.format(*sys.version_info[:2]),  # like 3.7
+             wheel_path))
+
+        if not self.keep_temp:
+            logger.info('removing %s', self.bdist_dir)
+            if not self.dry_run:
+                rmtree(self.bdist_dir, onerror=remove_readonly)
+
+    def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel_version + ')'):
+        from email.message import Message
+
+        # Workaround for Python 2.7 for when "generator" is unicode
+        if sys.version_info < (3,) and not isinstance(generator, str):
+            generator = generator.encode('utf-8')
+
+        msg = Message()
+        msg['Wheel-Version'] = '1.0'  # of the spec
+        msg['Generator'] = generator
+        msg['Root-Is-Purelib'] = str(self.root_is_pure).lower()
+        if self.build_number is not None:
+            msg['Build'] = self.build_number
+
+        # Doesn't work for bdist_wininst
+        impl_tag, abi_tag, plat_tag = self.get_tag()
+        for impl in impl_tag.split('.'):
+            for abi in abi_tag.split('.'):
+                for plat in plat_tag.split('.'):
+                    msg['Tag'] = '-'.join((impl, abi, plat))
+
+        wheelfile_path = os.path.join(wheelfile_base, 'WHEEL')
+        logger.info('creating %s', wheelfile_path)
+        buffer = BytesIO()
+        BytesGenerator(buffer, maxheaderlen=0).flatten(msg)
+        with open(wheelfile_path, 'wb') as f:
+            f.write(buffer.getvalue().replace(b'\r\n', b'\r'))
+
+    def _ensure_relative(self, path):
+        # copied from distutils' dir_util, where it was since deleted
+        drive, path = os.path.splitdrive(path)
+        if path[0:1] == os.sep:
+            path = drive + path[1:]
+        return path
+
+    @property
+    def license_paths(self):
+        metadata = self.distribution.get_option_dict('metadata')
+        files = set()
+        patterns = sorted({
+            option for option in metadata.get('license_files', ('', ''))[1].split()
+        })
+
+        if 'license_file' in metadata:
+            warnings.warn('The "license_file" option is deprecated. Use '
+                          '"license_files" instead.', DeprecationWarning)
+            files.add(metadata['license_file'][1])
+
+        if 'license_file' not in metadata and 'license_files' not in metadata:
+            patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*')
+
+        for pattern in patterns:
+            for path in iglob(pattern):
+                if path.endswith('~'):
+                    logger.debug('ignoring license file "%s" as it looks like a backup', path)
+                    continue
+
+                if path not in files and os.path.isfile(path):
+                    logger.info('adding license file "%s" (matched pattern "%s")', path, pattern)
+                    files.add(path)
+
+        return files
+
+    def egg2dist(self, egginfo_path, distinfo_path):
+        """Convert an .egg-info directory into a .dist-info directory"""
+        def adios(p):
+            """Appropriately delete directory, file or link."""
+            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
+                shutil.rmtree(p)
+            elif os.path.exists(p):
+                os.unlink(p)
+
+        adios(distinfo_path)
+
+        if not os.path.exists(egginfo_path):
+            # There is no egg-info. This is probably because the egg-info
+            # file/directory is not named matching the distribution name used
+            # to name the archive file. Check for this case and report
+            # accordingly.
+            import glob
+            pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info')
+            possible = glob.glob(pat)
+            err = "Egg metadata expected at %s but not found" % (egginfo_path,)
+            if possible:
+                alt = os.path.basename(possible[0])
+                err += " (%s found - possible misnamed archive file?)" % (alt,)
+
+            raise ValueError(err)
+
+        if os.path.isfile(egginfo_path):
+            # .egg-info is a single file
+            pkginfo_path = egginfo_path
+            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
+            os.mkdir(distinfo_path)
+        else:
+            # .egg-info is a directory
+            pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO')
+            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
+
+            # ignore common egg metadata that is useless to wheel
+            shutil.copytree(egginfo_path, distinfo_path,
+                            ignore=lambda x, y: {'PKG-INFO', 'requires.txt', 'SOURCES.txt',
+                                                 'not-zip-safe'}
+                            )
+
+            # delete dependency_links if it is only whitespace
+            dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt')
+            with open(dependency_links_path, 'r') as dependency_links_file:
+                dependency_links = dependency_links_file.read().strip()
+            if not dependency_links:
+                adios(dependency_links_path)
+
+        write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info)
+
+        for license_path in self.license_paths:
+            filename = os.path.basename(license_path)
+            shutil.copy(license_path, os.path.join(distinfo_path, filename))
+
+        adios(egginfo_path)
diff --git a/venv/lib/python3.7/site-packages/wheel/cli/__init__.py b/venv/lib/python3.7/site-packages/wheel/cli/__init__.py
new file mode 100644
index 00000000..95740bfb
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/cli/__init__.py
@@ -0,0 +1,88 @@
+"""
+Wheel command-line utility.
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+
+
+def require_pkgresources(name):
+    try:
+        import pkg_resources  # noqa: F401
+    except ImportError:
+        raise RuntimeError("'{0}' needs pkg_resources (part of setuptools).".format(name))
+
+
+class WheelError(Exception):
+    pass
+
+
+def unpack_f(args):
+    from .unpack import unpack
+    unpack(args.wheelfile, args.dest)
+
+
+def pack_f(args):
+    from .pack import pack
+    pack(args.directory, args.dest_dir, args.build_number)
+
+
+def convert_f(args):
+    from .convert import convert
+    convert(args.files, args.dest_dir, args.verbose)
+
+
+def version_f(args):
+    from .. import __version__
+    print("wheel %s" % __version__)
+
+
+def parser():
+    p = argparse.ArgumentParser()
+    s = p.add_subparsers(help="commands")
+
+    unpack_parser = s.add_parser('unpack', help='Unpack wheel')
+    unpack_parser.add_argument('--dest', '-d', help='Destination directory',
+                               default='.')
+    unpack_parser.add_argument('wheelfile', help='Wheel file')
+    unpack_parser.set_defaults(func=unpack_f)
+
+    repack_parser = s.add_parser('pack', help='Repack wheel')
+    repack_parser.add_argument('directory', help='Root directory of the unpacked wheel')
+    repack_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
+                               help="Directory to store the wheel (default %(default)s)")
+    repack_parser.add_argument('--build-number', help="Build tag to use in the wheel name")
+    repack_parser.set_defaults(func=pack_f)
+
+    convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel')
+    convert_parser.add_argument('files', nargs='*', help='Files to convert')
+    convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
+                                help="Directory to store wheels (default %(default)s)")
+    convert_parser.add_argument('--verbose', '-v', action='store_true')
+    convert_parser.set_defaults(func=convert_f)
+
+    version_parser = s.add_parser('version', help='Print version and exit')
+    version_parser.set_defaults(func=version_f)
+
+    help_parser = s.add_parser('help', help='Show this help')
+    help_parser.set_defaults(func=lambda args: p.print_help())
+
+    return p
+
+
+def main():
+    p = parser()
+    args = p.parse_args()
+    if not hasattr(args, 'func'):
+        p.print_help()
+    else:
+        try:
+            args.func(args)
+            return 0
+        except WheelError as e:
+            print(e, file=sys.stderr)
+
+    return 1
diff --git a/venv/lib/python3.7/site-packages/wheel/cli/convert.py b/venv/lib/python3.7/site-packages/wheel/cli/convert.py
new file mode 100644
index 00000000..154f1b1e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/cli/convert.py
@@ -0,0 +1,269 @@
+import os.path
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+from distutils import dist
+from glob import iglob
+
+from ..bdist_wheel import bdist_wheel
+from ..wheelfile import WheelFile
+from . import WheelError, require_pkgresources
+
+egg_info_re = re.compile(r'''
+    (?P<name>.+?)-(?P<ver>.+?)
+    (-(?P<pyver>py\d\.\d+)
+     (-(?P<arch>.+?))?
+    )?.egg$''', re.VERBOSE)
+
+
+class _bdist_wheel_tag(bdist_wheel):
+    # allow the client to override the default generated wheel tag
+    # The default bdist_wheel implementation uses python and abi tags
+    # of the running python process. This is not suitable for
+    # generating/repackaging prebuilt binaries.
+
+    full_tag_supplied = False
+    full_tag = None  # None or a (pytag, soabitag, plattag) triple
+
+    def get_tag(self):
+        if self.full_tag_supplied and self.full_tag is not None:
+            return self.full_tag
+        else:
+            return bdist_wheel.get_tag(self)
+
+
+def egg2wheel(egg_path, dest_dir):
+    filename = os.path.basename(egg_path)
+    match = egg_info_re.match(filename)
+    if not match:
+        raise WheelError('Invalid egg file name: {}'.format(filename))
+
+    egg_info = match.groupdict()
+    dir = tempfile.mkdtemp(suffix="_e2w")
+    if os.path.isfile(egg_path):
+        # assume we have a bdist_egg otherwise
+        with zipfile.ZipFile(egg_path) as egg:
+            egg.extractall(dir)
+    else:
+        # support buildout-style installed eggs directories
+        for pth in os.listdir(egg_path):
+            src = os.path.join(egg_path, pth)
+            if os.path.isfile(src):
+                shutil.copy2(src, dir)
+            else:
+                shutil.copytree(src, os.path.join(dir, pth))
+
+    pyver = egg_info['pyver']
+    if pyver:
+        pyver = egg_info['pyver'] = pyver.replace('.', '')
+
+    arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
+
+    # assume all binary eggs are for CPython
+    abi = 'cp' + pyver[2:] if arch != 'any' else 'none'
+
+    root_is_purelib = egg_info['arch'] is None
+    if root_is_purelib:
+        bw = bdist_wheel(dist.Distribution())
+    else:
+        bw = _bdist_wheel_tag(dist.Distribution())
+
+    bw.root_is_pure = root_is_purelib
+    bw.python_tag = pyver
+    bw.plat_name_supplied = True
+    bw.plat_name = egg_info['arch'] or 'any'
+    if not root_is_purelib:
+        bw.full_tag_supplied = True
+        bw.full_tag = (pyver, abi, arch)
+
+    dist_info_dir = os.path.join(dir, '{name}-{ver}.dist-info'.format(**egg_info))
+    bw.egg2dist(os.path.join(dir, 'EGG-INFO'), dist_info_dir)
+    bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
+    wheel_name = '{name}-{ver}-{pyver}-{}-{}.whl'.format(abi, arch, **egg_info)
+    with WheelFile(os.path.join(dest_dir, wheel_name), 'w') as wf:
+        wf.write_files(dir)
+
+    shutil.rmtree(dir)
+
+
+def parse_wininst_info(wininfo_name, egginfo_name):
+    """Extract metadata from filenames.
+
+    Extracts the four metadata items needed (name, version, pyversion, arch) from
+    the installer filename and the name of the egg-info directory embedded in
+    the zipfile (if any).
+
+    The egginfo filename has the format::
+
+        name-ver(-pyver)(-arch).egg-info
+
+    The installer filename has the format::
+
+        name-ver.arch(-pyver).exe
+
+    Some things to note:
+
+    1. The installer filename is not definitive. An installer can be renamed
+       and work perfectly well as an installer. So more reliable data should
+       be used whenever possible.
+    2. The egg-info data should be preferred for the name and version, because
+       these come straight from the distutils metadata, and are mandatory.
+    3. The pyver from the egg-info data should be ignored, as it is
+       constructed from the version of Python used to build the installer,
+       which is irrelevant - the installer filename is correct here (even to
+       the point that when it's not there, any version is implied).
+    4. The architecture must be taken from the installer filename, as it is
+       not included in the egg-info data.
+    5. Architecture-neutral installers still have an architecture because the
+       installer format itself (being executable) is architecture-specific. We
+       should therefore ignore the architecture if the content is pure-python.
+    """
+
+    egginfo = None
+    if egginfo_name:
+        egginfo = egg_info_re.search(egginfo_name)
+        if not egginfo:
+            raise ValueError("Egg info filename %s is not valid" % (egginfo_name,))
+
+    # Parse the wininst filename
+    # 1. Distribution name (up to the first '-')
+    w_name, sep, rest = wininfo_name.partition('-')
+    if not sep:
+        raise ValueError("Installer filename %s is not valid" % (wininfo_name,))
+
+    # Strip '.exe'
+    rest = rest[:-4]
+    # 2. Python version (from the last '-', must start with 'py')
+    rest2, sep, w_pyver = rest.rpartition('-')
+    if sep and w_pyver.startswith('py'):
+        rest = rest2
+        w_pyver = w_pyver.replace('.', '')
+    else:
+        # Not version specific - use py2.py3. While it is possible that
+        # pure-Python code is not compatible with both Python 2 and 3, there
+        # is no way of knowing from the wininst format, so we assume the best
+        # here (the user can always manually rename the wheel to be more
+        # restrictive if needed).
+        w_pyver = 'py2.py3'
+    # 3. Version and architecture
+    w_ver, sep, w_arch = rest.rpartition('.')
+    if not sep:
+        raise ValueError("Installer filename %s is not valid" % (wininfo_name,))
+
+    if egginfo:
+        w_name = egginfo.group('name')
+        w_ver = egginfo.group('ver')
+
+    return {'name': w_name, 'ver': w_ver, 'arch': w_arch, 'pyver': w_pyver}
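+
+
+# Worked example with a hypothetical installer name:
+#   parse_wininst_info('foo-1.0.win32-py2.7.exe', None)
+# returns {'name': 'foo', 'ver': '1.0', 'arch': 'win32', 'pyver': 'py27'}.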
+
+
+def wininst2wheel(path, dest_dir):
+    with zipfile.ZipFile(path) as bdw:
+        # Search for egg-info in the archive
+        egginfo_name = None
+        for filename in bdw.namelist():
+            if '.egg-info' in filename:
+                egginfo_name = filename
+                break
+
+        info = parse_wininst_info(os.path.basename(path), egginfo_name)
+
+        root_is_purelib = True
+        for zipinfo in bdw.infolist():
+            if zipinfo.filename.startswith('PLATLIB'):
+                root_is_purelib = False
+                break
+        if root_is_purelib:
+            paths = {'purelib': ''}
+        else:
+            paths = {'platlib': ''}
+
+        dist_info = "%(name)s-%(ver)s" % info
+        datadir = "%s.data/" % dist_info
+
+        # rewrite paths to trick ZipFile into extracting an egg
+        # XXX grab wininst .ini - between .exe, padding, and first zip file.
+        members = []
+        egginfo_name = ''
+        for zipinfo in bdw.infolist():
+            key, basename = zipinfo.filename.split('/', 1)
+            key = key.lower()
+            basepath = paths.get(key, None)
+            if basepath is None:
+                basepath = datadir + key.lower() + '/'
+            oldname = zipinfo.filename
+            newname = basepath + basename
+            zipinfo.filename = newname
+            del bdw.NameToInfo[oldname]
+            bdw.NameToInfo[newname] = zipinfo
+            # Collect member names, but omit '' (from an entry like "PLATLIB/")
+            if newname:
+                members.append(newname)
+            # Remember egg-info name for the egg2dist call below
+            if not egginfo_name:
+                if newname.endswith('.egg-info'):
+                    egginfo_name = newname
+                elif '.egg-info/' in newname:
+                    egginfo_name, sep, _ = newname.rpartition('/')
+        dir = tempfile.mkdtemp(suffix="_b2w")
+        bdw.extractall(dir, members)
+
+    # egg2wheel
+    abi = 'none'
+    pyver = info['pyver']
+    arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_')
+    # Wininst installers always have arch even if they are not
+    # architecture-specific (because the format itself is).
+    # So, assume the content is architecture-neutral if root is purelib.
+    if root_is_purelib:
+        arch = 'any'
+    # If the installer is architecture-specific, it's almost certainly also
+    # CPython-specific.
+    if arch != 'any':
+        pyver = pyver.replace('py', 'cp')
+    wheel_name = '-'.join((dist_info, pyver, abi, arch)) + '.whl'
+    if root_is_purelib:
+        bw = bdist_wheel(dist.Distribution())
+    else:
+        bw = _bdist_wheel_tag(dist.Distribution())
+
+    bw.root_is_pure = root_is_purelib
+    bw.python_tag = pyver
+    bw.plat_name_supplied = True
+    bw.plat_name = info['arch'] or 'any'
+
+    if not root_is_purelib:
+        bw.full_tag_supplied = True
+        bw.full_tag = (pyver, abi, arch)
+
+    dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
+    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
+    bw.write_wheelfile(dist_info_dir, generator='wininst2wheel')
+
+    wheel_path = os.path.join(dest_dir, wheel_name)
+    with WheelFile(wheel_path, 'w') as wf:
+        wf.write_files(dir)
+
+    shutil.rmtree(dir)
+
+
+def convert(files, dest_dir, verbose):
+    # Only support wheel convert if pkg_resources is present
+    require_pkgresources('wheel convert')
+
+    for pat in files:
+        for installer in iglob(pat):
+            if os.path.splitext(installer)[1] == '.egg':
+                conv = egg2wheel
+            else:
+                conv = wininst2wheel
+
+            if verbose:
+                print("{}... ".format(installer))
+                sys.stdout.flush()
+
+            conv(installer, dest_dir)
+            if verbose:
+                print("OK")
diff --git a/venv/lib/python3.7/site-packages/wheel/cli/pack.py b/venv/lib/python3.7/site-packages/wheel/cli/pack.py
new file mode 100644
index 00000000..1e77fdbd
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/cli/pack.py
@@ -0,0 +1,79 @@
+from __future__ import print_function
+
+import os.path
+import re
+import sys
+
+from wheel.cli import WheelError
+from wheel.wheelfile import WheelFile
+
+DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$")
+BUILD_NUM_RE = re.compile(br'Build: (\d\w*)$')
+
+
+def pack(directory, dest_dir, build_number):
+    """Repack a previously unpacked wheel directory into a new wheel file.
+
+    The .dist-info/WHEEL file must contain one or more tags so that the target
+    wheel file name can be determined.
+
+    :param directory: The unpacked wheel directory
+    :param dest_dir: Destination directory (defaults to the current directory)
+    :param build_number: Optional build tag to use in the new wheel name,
+        overriding any existing build tag
+    """
+    # Find the .dist-info directory
+    dist_info_dirs = [fn for fn in os.listdir(directory)
+                      if os.path.isdir(os.path.join(directory, fn)) and DIST_INFO_RE.match(fn)]
+    if len(dist_info_dirs) > 1:
+        raise WheelError('Multiple .dist-info directories found in {}'.format(directory))
+    elif not dist_info_dirs:
+        raise WheelError('No .dist-info directories found in {}'.format(directory))
+
+    # Determine the target wheel filename
+    dist_info_dir = dist_info_dirs[0]
+    name_version = DIST_INFO_RE.match(dist_info_dir).group('namever')
+
+    # Read the tags and the existing build number from .dist-info/WHEEL
+    existing_build_number = None
+    wheel_file_path = os.path.join(directory, dist_info_dir, 'WHEEL')
+    with open(wheel_file_path) as f:
+        tags = []
+        for line in f:
+            if line.startswith('Tag: '):
+                tags.append(line.split(' ')[1].rstrip())
+            elif line.startswith('Build: '):
+                existing_build_number = line.split(' ')[1].rstrip()
+
+        if not tags:
+            raise WheelError('No tags present in {}/WHEEL; cannot determine target wheel filename'
+                             .format(dist_info_dir))
+
+    # Set the wheel file name and add/replace/remove the Build tag in .dist-info/WHEEL
+    build_number = build_number if build_number is not None else existing_build_number
+    if build_number is not None:
+        if build_number:
+            name_version += '-' + build_number
+
+        if build_number != existing_build_number:
+            replacement = ('Build: %s\r\n' % build_number).encode('ascii') if build_number else b''
+            with open(wheel_file_path, 'rb+') as f:
+                wheel_file_content = f.read()
+                wheel_file_content, num_replaced = BUILD_NUM_RE.subn(
+                    replacement, wheel_file_content)
+                if not num_replaced:
+                    wheel_file_content += replacement
+
+                f.seek(0)
+                f.truncate()
+                f.write(wheel_file_content)
+
+    # Reassemble the tags for the wheel file
+    impls = sorted({tag.split('-')[0] for tag in tags})
+    abivers = sorted({tag.split('-')[1] for tag in tags})
+    platforms = sorted({tag.split('-')[2] for tag in tags})
+    tagline = '-'.join(['.'.join(impls), '.'.join(abivers), '.'.join(platforms)])
+
+    # Repack the wheel
+    wheel_path = os.path.join(dest_dir, '{}-{}.whl'.format(name_version, tagline))
+    with WheelFile(wheel_path, 'w') as wf:
+        print("Repacking wheel as {}...".format(wheel_path), end='')
+        sys.stdout.flush()
+        wf.write_files(directory)
+
+    print('OK')
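+
+
+# Illustrative round-trip with the unpack command (file names hypothetical):
+#   wheel unpack foo-1.0-py3-none-any.whl   # unpacks to ./foo-1.0/
+#   ... edit the unpacked tree ...
+#   wheel pack foo-1.0/                     # repacks it into a new .whl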
diff --git a/venv/lib/python3.7/site-packages/wheel/cli/unpack.py b/venv/lib/python3.7/site-packages/wheel/cli/unpack.py
new file mode 100644
index 00000000..2e9857a3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/cli/unpack.py
@@ -0,0 +1,25 @@
+from __future__ import print_function
+
+import os.path
+import sys
+
+from ..wheelfile import WheelFile
+
+
+def unpack(path, dest='.'):
+    """Unpack a wheel.
+
+    Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
+    is the package name and {ver} its version.
+
+    :param path: The path to the wheel.
+    :param dest: Destination directory (defaults to the current directory).
+    """
+    with WheelFile(path) as wf:
+        namever = wf.parsed_filename.group('namever')
+        destination = os.path.join(dest, namever)
+        print("Unpacking to: {}...".format(destination), end='')
+        sys.stdout.flush()
+        wf.extractall(destination)
+
+    print('OK')
diff --git a/venv/lib/python3.7/site-packages/wheel/macosx_libfile.py b/venv/lib/python3.7/site-packages/wheel/macosx_libfile.py
new file mode 100644
index 00000000..8918039f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/macosx_libfile.py
@@ -0,0 +1,428 @@
+"""
+This module contains functions to analyse dynamic library
+headers and extract system information.
+
+Currently it only handles macOS.
+
+A library file on macOS starts with either a Mach-O or a FAT header.
+The two can be distinguished by the first 32 bits, the so-called magic
+number.  The native-byte-order values carry the suffix _MAGIC; the
+suffix _CIGAM means reversed byte order.
+Both headers come in two variants: 32-bit and 64-bit.
+
+A FAT header indicates that the library contains several builds
+(typically for different architectures).  It records where each
+Mach-O header starts.
+
+Each section that starts with a Mach-O header contains one library
+(so if the file starts with a Mach-O header it contains only one build).
+
+The Mach-O header is followed by load-command sections.
+Each of them starts with two fields:
+cmd - magic number for this command
+cmdsize - total size occupied by this section's information.
+
+Here only the sections LC_VERSION_MIN_MACOSX (for macOS 10.13 and earlier)
+and LC_BUILD_VERSION (for macOS 10.14 and newer) are of interest,
+because they contain information about the minimal system version.
+
+Important remarks:
+- For fat files this implementation looks for the maximum version number.
+  It does not check whether a slice is 32- or 64-bit, nor compare it with
+  the package currently being built, so it may report a higher version
+  than needed.
+- All structure signatures are taken from the macOS header files.
+- The binary format should be more stable than `otool` output; if Apple
+  introduces changes, both approaches would need to be updated.
+- The system compiler will set the deployment target no lower than
+  11.0 for arm64 builds.  For "Universal 2" builds, use the x86_64
+  deployment target when the arm64 target is 11.0.
+"""
+
+import ctypes
+import os
+import sys
+
+"""here the needed const and struct from mach-o header files"""
+
+FAT_MAGIC = 0xcafebabe
+FAT_CIGAM = 0xbebafeca
+FAT_MAGIC_64 = 0xcafebabf
+FAT_CIGAM_64 = 0xbfbafeca
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+
+LC_VERSION_MIN_MACOSX = 0x24
+LC_BUILD_VERSION = 0x32
+
+CPU_TYPE_ARM64 = 0x0100000c
+
+mach_header_fields = [
+        ("magic", ctypes.c_uint32), ("cputype", ctypes.c_int),
+        ("cpusubtype", ctypes.c_int), ("filetype", ctypes.c_uint32),
+        ("ncmds", ctypes.c_uint32), ("sizeofcmds", ctypes.c_uint32),
+        ("flags", ctypes.c_uint32)
+    ]
+"""
+struct mach_header {
+    uint32_t	magic;		/* mach magic number identifier */
+    cpu_type_t	cputype;	/* cpu specifier */
+    cpu_subtype_t	cpusubtype;	/* machine specifier */
+    uint32_t	filetype;	/* type of file */
+    uint32_t	ncmds;		/* number of load commands */
+    uint32_t	sizeofcmds;	/* the size of all the load commands */
+    uint32_t	flags;		/* flags */
+};
+typedef integer_t cpu_type_t;
+typedef integer_t cpu_subtype_t;
+"""
+
+mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
+"""
+struct mach_header_64 {
+    uint32_t	magic;		/* mach magic number identifier */
+    cpu_type_t	cputype;	/* cpu specifier */
+    cpu_subtype_t	cpusubtype;	/* machine specifier */
+    uint32_t	filetype;	/* type of file */
+    uint32_t	ncmds;		/* number of load commands */
+    uint32_t	sizeofcmds;	/* the size of all the load commands */
+    uint32_t	flags;		/* flags */
+    uint32_t	reserved;	/* reserved */
+};
+"""
+
+fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
+"""
+struct fat_header {
+    uint32_t	magic;		/* FAT_MAGIC or FAT_MAGIC_64 */
+    uint32_t	nfat_arch;	/* number of structs that follow */
+};
+"""
+
+fat_arch_fields = [
+    ("cputype", ctypes.c_int), ("cpusubtype", ctypes.c_int),
+    ("offset", ctypes.c_uint32), ("size", ctypes.c_uint32),
+    ("align", ctypes.c_uint32)
+]
+"""
+struct fat_arch {
+    cpu_type_t	cputype;	/* cpu specifier (int) */
+    cpu_subtype_t	cpusubtype;	/* machine specifier (int) */
+    uint32_t	offset;		/* file offset to this object file */
+    uint32_t	size;		/* size of this object file */
+    uint32_t	align;		/* alignment as a power of 2 */
+};
+"""
+
+fat_arch_64_fields = [
+    ("cputype", ctypes.c_int), ("cpusubtype", ctypes.c_int),
+    ("offset", ctypes.c_uint64), ("size", ctypes.c_uint64),
+    ("align", ctypes.c_uint32), ("reserved", ctypes.c_uint32)
+]
+"""
+struct fat_arch_64 {
+    cpu_type_t	cputype;	/* cpu specifier (int) */
+    cpu_subtype_t	cpusubtype;	/* machine specifier (int) */
+    uint64_t	offset;		/* file offset to this object file */
+    uint64_t	size;		/* size of this object file */
+    uint32_t	align;		/* alignment as a power of 2 */
+    uint32_t	reserved;	/* reserved */
+};
+"""
+
+segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)]
+"""base for reading segment info"""
+
+segment_command_fields = [
+    ("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32),
+    ("segname", ctypes.c_char * 16), ("vmaddr", ctypes.c_uint32),
+    ("vmsize", ctypes.c_uint32), ("fileoff", ctypes.c_uint32),
+    ("filesize", ctypes.c_uint32), ("maxprot", ctypes.c_int),
+    ("initprot", ctypes.c_int), ("nsects", ctypes.c_uint32),
+    ("flags", ctypes.c_uint32),
+    ]
+"""
+struct segment_command { /* for 32-bit architectures */
+    uint32_t	cmd;		/* LC_SEGMENT */
+    uint32_t	cmdsize;	/* includes sizeof section structs */
+    char		segname[16];	/* segment name */
+    uint32_t	vmaddr;		/* memory address of this segment */
+    uint32_t	vmsize;		/* memory size of this segment */
+    uint32_t	fileoff;	/* file offset of this segment */
+    uint32_t	filesize;	/* amount to map from the file */
+    vm_prot_t	maxprot;	/* maximum VM protection */
+    vm_prot_t	initprot;	/* initial VM protection */
+    uint32_t	nsects;		/* number of sections in segment */
+    uint32_t	flags;		/* flags */
+};
+typedef int vm_prot_t;
+"""
+
+segment_command_fields_64 = [
+    ("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32),
+    ("segname", ctypes.c_char * 16), ("vmaddr", ctypes.c_uint64),
+    ("vmsize", ctypes.c_uint64), ("fileoff", ctypes.c_uint64),
+    ("filesize", ctypes.c_uint64), ("maxprot", ctypes.c_int),
+    ("initprot", ctypes.c_int), ("nsects", ctypes.c_uint32),
+    ("flags", ctypes.c_uint32),
+    ]
+"""
+struct segment_command_64 { /* for 64-bit architectures */
+    uint32_t	cmd;		/* LC_SEGMENT_64 */
+    uint32_t	cmdsize;	/* includes sizeof section_64 structs */
+    char		segname[16];	/* segment name */
+    uint64_t	vmaddr;		/* memory address of this segment */
+    uint64_t	vmsize;		/* memory size of this segment */
+    uint64_t	fileoff;	/* file offset of this segment */
+    uint64_t	filesize;	/* amount to map from the file */
+    vm_prot_t	maxprot;	/* maximum VM protection */
+    vm_prot_t	initprot;	/* initial VM protection */
+    uint32_t	nsects;		/* number of sections in segment */
+    uint32_t	flags;		/* flags */
+};
+"""
+
+version_min_command_fields = segment_base_fields + \
+    [("version", ctypes.c_uint32), ("sdk", ctypes.c_uint32)]
+"""
+struct version_min_command {
+    uint32_t	cmd;		/* LC_VERSION_MIN_MACOSX or
+                               LC_VERSION_MIN_IPHONEOS or
+                               LC_VERSION_MIN_WATCHOS or
+                               LC_VERSION_MIN_TVOS */
+    uint32_t	cmdsize;	/* sizeof(struct min_version_command) */
+    uint32_t	version;	/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t	sdk;		/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+};
+"""
+
+build_version_command_fields = segment_base_fields + \
+    [("platform", ctypes.c_uint32), ("minos", ctypes.c_uint32),
+     ("sdk", ctypes.c_uint32), ("ntools", ctypes.c_uint32)]
+"""
+struct build_version_command {
+    uint32_t	cmd;		/* LC_BUILD_VERSION */
+    uint32_t	cmdsize;	/* sizeof(struct build_version_command) plus */
+                                /* ntools * sizeof(struct build_tool_version) */
+    uint32_t	platform;	/* platform */
+    uint32_t	minos;		/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t	sdk;		/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t	ntools;		/* number of tool entries following this */
+};
+"""
+
+
+def swap32(x):
+    return (((x << 24) & 0xFF000000) |
+            ((x << 8) & 0x00FF0000) |
+            ((x >> 8) & 0x0000FF00) |
+            ((x >> 24) & 0x000000FF))
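+
+
+# For example, swap32(MH_CIGAM) == MH_MAGIC:
+# 0xcefaedfe byte-swapped is 0xfeedface.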
+
+
+def get_base_class_and_magic_number(lib_file, seek=None):
+    if seek is None:
+        seek = lib_file.tell()
+    else:
+        lib_file.seek(seek)
+    magic_number = ctypes.c_uint32.from_buffer_copy(
+        lib_file.read(ctypes.sizeof(ctypes.c_uint32))).value
+
+    # Handle wrong byte order
+    if magic_number in [FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64]:
+        if sys.byteorder == "little":
+            BaseClass = ctypes.BigEndianStructure
+        else:
+            BaseClass = ctypes.LittleEndianStructure
+
+        magic_number = swap32(magic_number)
+    else:
+        BaseClass = ctypes.Structure
+
+    lib_file.seek(seek)
+    return BaseClass, magic_number
+
+
+def read_data(struct_class, lib_file):
+    return struct_class.from_buffer_copy(lib_file.read(
+                        ctypes.sizeof(struct_class)))
+
+
+def extract_macosx_min_system_version(path_to_lib):
+    with open(path_to_lib, "rb") as lib_file:
+        BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
+        if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
+            return
+
+        if magic_number in [FAT_MAGIC, FAT_MAGIC_64]:
+            class FatHeader(BaseClass):
+                _fields_ = fat_header_fields
+
+            fat_header = read_data(FatHeader, lib_file)
+            if magic_number == FAT_MAGIC:
+
+                class FatArch(BaseClass):
+                    _fields_ = fat_arch_fields
+            else:
+
+                class FatArch(BaseClass):
+                    _fields_ = fat_arch_64_fields
+
+            fat_arch_list = [read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)]
+
+            versions_list = []
+            for el in fat_arch_list:
+                try:
+                    version = read_mach_header(lib_file, el.offset)
+                    if version is not None:
+                        if el.cputype == CPU_TYPE_ARM64 and len(fat_arch_list) != 1:
+                            # Xcode will not set the deployment target below 11.0.0
+                            # for the arm64 architecture. Ignore the arm64 deployment
+                            # in fat binaries when the target is 11.0.0, that way
+                            # the other architectures can select a lower deployment
+                            # target.
+                            # This is safe because there is no arm64 variant for
+                            # macOS 10.15 or earlier.
+                            if version == (11, 0, 0):
+                                continue
+                        versions_list.append(version)
+                except ValueError:
+                    pass
+
+            if len(versions_list) > 0:
+                return max(versions_list)
+            else:
+                return None
+
+        else:
+            try:
+                return read_mach_header(lib_file, 0)
+            except ValueError:
+                """when some error during read library files"""
+                return None
+
+
+def read_mach_header(lib_file, seek=None):
+    """
+    This funcition parse mach-O header and extract
+    information about minimal system version
+
+    :param lib_file: reference to opened library file with pointer
+    """
+    if seek is not None:
+        lib_file.seek(seek)
+    base_class, magic_number = get_base_class_and_magic_number(lib_file)
+    arch = "32" if magic_number == MH_MAGIC else "64"
+
+    class SegmentBase(base_class):
+        _fields_ = segment_base_fields
+
+    if arch == "32":
+
+        class MachHeader(base_class):
+            _fields_ = mach_header_fields
+
+    else:
+
+        class MachHeader(base_class):
+            _fields_ = mach_header_fields_64
+
+    mach_header = read_data(MachHeader, lib_file)
+    for _i in range(mach_header.ncmds):
+        pos = lib_file.tell()
+        segment_base = read_data(SegmentBase, lib_file)
+        lib_file.seek(pos)
+        if segment_base.cmd == LC_VERSION_MIN_MACOSX:
+            class VersionMinCommand(base_class):
+                _fields_ = version_min_command_fields
+
+            version_info = read_data(VersionMinCommand, lib_file)
+            return parse_version(version_info.version)
+        elif segment_base.cmd == LC_BUILD_VERSION:
+            class VersionBuild(base_class):
+                _fields_ = build_version_command_fields
+
+            version_info = read_data(VersionBuild, lib_file)
+            return parse_version(version_info.minos)
+        else:
+            lib_file.seek(pos + segment_base.cmdsize)
+            continue
+
+
+def parse_version(version):
+    x = (version & 0xffff0000) >> 16
+    y = (version & 0x0000ff00) >> 8
+    z = (version & 0x000000ff)
+    return x, y, z
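+
+# The load commands above encode X.Y.Z versions in nibbles as xxxx.yy.zz;
+# e.g. macOS 10.14.0 is stored as 0x000A0E00 (illustrative sketch):
+#
+#     >>> parse_version(0x000A0E00)
+#     (10, 14, 0)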
+
+
+def calculate_macosx_platform_tag(archive_root, platform_tag):
+    """
+    Calculate the proper macosx platform tag based on the files included in the wheel.
+
+    Example platform tag `macosx-10.14-x86_64`
+    """
+    prefix, base_version, suffix = platform_tag.split('-')
+    base_version = tuple([int(x) for x in base_version.split(".")])
+    base_version = base_version[:2]
+    if base_version[0] > 10:
+        base_version = (base_version[0], 0)
+    assert len(base_version) == 2
+    if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
+        deploy_target = tuple([int(x) for x in os.environ[
+            "MACOSX_DEPLOYMENT_TARGET"].split(".")])
+        deploy_target = deploy_target[:2]
+        if deploy_target[0] > 10:
+            deploy_target = (deploy_target[0], 0)
+        if deploy_target < base_version:
+            sys.stderr.write(
+                 "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than the "
+                 "version on which the Python interpreter was compiled ({}), and will be "
+                 "ignored.\n".format('.'.join(str(x) for x in deploy_target),
+                                     '.'.join(str(x) for x in base_version))
+                )
+        else:
+            base_version = deploy_target
+
+    assert len(base_version) == 2
+    start_version = base_version
+    versions_dict = {}
+    for (dirpath, dirnames, filenames) in os.walk(archive_root):
+        for filename in filenames:
+            if filename.endswith('.dylib') or filename.endswith('.so'):
+                lib_path = os.path.join(dirpath, filename)
+                min_ver = extract_macosx_min_system_version(lib_path)
+                if min_ver is not None:
+                    min_ver = min_ver[0:2]
+                    if min_ver[0] > 10:
+                        min_ver = (min_ver[0], 0)
+                    versions_dict[lib_path] = min_ver
+
+    if len(versions_dict) > 0:
+        base_version = max(base_version, max(versions_dict.values()))
+
+    # The macosx platform tag does not support minor bugfix releases.
+    fin_base_version = "_".join([str(x) for x in base_version])
+    if start_version < base_version:
+        problematic_files = [k for k, v in versions_dict.items() if v > start_version]
+        # Count the files before joining them into a single string; otherwise
+        # len() measures characters rather than files.
+        if len(problematic_files) == 1:
+            files_form = "this file"
+        else:
+            files_form = "these files"
+        problematic_files = "\n".join(problematic_files)
+        error_message = \
+            "[WARNING] This wheel needs a higher macOS version than {} " \
+            "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least " + \
+            fin_base_version + " or recreate " + files_form + " with a lower " \
+            "MACOSX_DEPLOYMENT_TARGET:\n" + problematic_files
+
+        if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
+            error_message = error_message.format("is set in the MACOSX_DEPLOYMENT_TARGET variable.")
+        else:
+            error_message = error_message.format(
+                "the version your Python interpreter is compiled against.")
+
+        sys.stderr.write(error_message)
+
+    platform_tag = prefix + "_" + fin_base_version + "_" + suffix
+    return platform_tag
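+
+
+if __name__ == "__main__":
+    # Minimal usage sketch with illustrative arguments: scan the current
+    # directory for bundled .so/.dylib files and widen the base platform
+    # tag if any of them requires a newer macOS.
+    print(calculate_macosx_platform_tag(".", "macosx-10.9-x86_64"))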
diff --git a/venv/lib/python3.7/site-packages/wheel/metadata.py b/venv/lib/python3.7/site-packages/wheel/metadata.py
new file mode 100644
index 00000000..37efa743
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/metadata.py
@@ -0,0 +1,133 @@
+"""
+Tools for converting old- to new-style metadata.
+"""
+
+import os.path
+import textwrap
+
+import pkg_resources
+
+from .pkginfo import read_pkg_info
+
+
+def requires_to_requires_dist(requirement):
+    """Return the version specifier for a requirement in PEP 345/566 fashion."""
+    if getattr(requirement, 'url', None):
+        return " @ " + requirement.url
+
+    requires_dist = []
+    for op, ver in requirement.specs:
+        requires_dist.append(op + ver)
+    if not requires_dist:
+        return ''
+    return " (%s)" % ','.join(sorted(requires_dist))
+
+
+def convert_requirements(requirements):
+    """Yield Requires-Dist: strings for parsed requirements strings."""
+    for req in requirements:
+        parsed_requirement = pkg_resources.Requirement.parse(req)
+        spec = requires_to_requires_dist(parsed_requirement)
+        extras = ",".join(sorted(parsed_requirement.extras))
+        if extras:
+            extras = "[%s]" % extras
+        yield (parsed_requirement.project_name + extras + spec)
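+
+# Doctest-style sketch: extras and version specifiers are folded into a
+# single Requires-Dist value (illustrative requirement string):
+#
+#     >>> list(convert_requirements(["requests[security]>=2.8.1"]))
+#     ['requests[security] (>=2.8.1)']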
+
+
+def generate_requirements(extras_require):
+    """
+    Convert requirements from a setup()-style dictionary to ('Requires-Dist', 'requirement')
+    and ('Provides-Extra', 'extra') tuples.
+
+    extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
+    using the empty extra {'': [requirements]} to hold install_requires.
+    """
+    for extra, depends in extras_require.items():
+        condition = ''
+        extra = extra or ''
+        if ':' in extra:  # setuptools extra:condition syntax
+            extra, condition = extra.split(':', 1)
+
+        extra = pkg_resources.safe_extra(extra)
+        if extra:
+            yield 'Provides-Extra', extra
+            if condition:
+                condition = "(" + condition + ") and "
+            condition += "extra == '%s'" % extra
+
+        if condition:
+            condition = ' ; ' + condition
+
+        for new_req in convert_requirements(depends):
+            yield 'Requires-Dist', new_req + condition
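+
+# Doctest-style sketch of the setuptools extra:condition syntax
+# (illustrative input):
+#
+#     >>> list(generate_requirements({'test:python_version<"3.3"': ["mock"]}))
+#     [('Provides-Extra', 'test'),
+#      ('Requires-Dist', 'mock ; (python_version<"3.3") and extra == \'test\'')]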
+
+
+def pkginfo_to_metadata(egg_info_path, pkginfo_path):
+    """
+    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
+    """
+    pkg_info = read_pkg_info(pkginfo_path)
+    pkg_info.replace_header('Metadata-Version', '2.1')
+    # Those will be regenerated from `requires.txt`.
+    del pkg_info['Provides-Extra']
+    del pkg_info['Requires-Dist']
+    requires_path = os.path.join(egg_info_path, 'requires.txt')
+    if os.path.exists(requires_path):
+        with open(requires_path) as requires_file:
+            requires = requires_file.read()
+
+        parsed_requirements = sorted(pkg_resources.split_sections(requires),
+                                     key=lambda x: x[0] or '')
+        for extra, reqs in parsed_requirements:
+            for key, value in generate_requirements({extra: reqs}):
+                if (key, value) not in pkg_info.items():
+                    pkg_info[key] = value
+
+    description = pkg_info['Description']
+    if description:
+        pkg_info.set_payload(dedent_description(pkg_info))
+        del pkg_info['Description']
+
+    return pkg_info
+
+
+def pkginfo_unicode(pkg_info, field):
+    """Hack to coax Unicode out of an email Message() - Python 3.3+"""
+    text = pkg_info[field]
+    field = field.lower()
+    if not isinstance(text, str):
+        for item in pkg_info.raw_items():
+            if item[0].lower() == field:
+                text = item[1].encode('ascii', 'surrogateescape') \
+                    .decode('utf-8')
+                break
+
+    return text
+
+
+def dedent_description(pkg_info):
+    """
+    Dedent and convert pkg_info['Description'] to Unicode.
+    """
+    description = pkg_info['Description']
+
+    # Python 3 Unicode handling, sorta.
+    surrogates = False
+    if not isinstance(description, str):
+        surrogates = True
+        description = pkginfo_unicode(pkg_info, 'Description')
+
+    description_lines = description.splitlines()
+    description_dedent = '\n'.join(
+        # if the first line of long_description is blank,
+        # the first line here will be indented.
+        (description_lines[0].lstrip(),
+         textwrap.dedent('\n'.join(description_lines[1:])),
+         '\n'))
+
+    if surrogates:
+        description_dedent = description_dedent \
+            .encode("utf8") \
+            .decode("ascii", "surrogateescape")
+
+    return description_dedent
diff --git a/venv/lib/python3.7/site-packages/wheel/pkginfo.py b/venv/lib/python3.7/site-packages/wheel/pkginfo.py
new file mode 100644
index 00000000..115be45b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/pkginfo.py
@@ -0,0 +1,43 @@
+"""Tools for reading and writing PKG-INFO / METADATA without caring
+about the encoding."""
+
+from email.parser import Parser
+
+try:
+    unicode
+    _PY3 = False
+except NameError:
+    _PY3 = True
+
+if not _PY3:
+    from email.generator import Generator
+
+    def read_pkg_info_bytes(bytestr):
+        return Parser().parsestr(bytestr)
+
+    def read_pkg_info(path):
+        with open(path, "r") as headers:
+            message = Parser().parse(headers)
+        return message
+
+    def write_pkg_info(path, message):
+        with open(path, 'w') as metadata:
+            Generator(metadata, mangle_from_=False, maxheaderlen=0).flatten(message)
+else:
+    from email.generator import BytesGenerator
+
+    def read_pkg_info_bytes(bytestr):
+        headers = bytestr.decode(encoding="ascii", errors="surrogateescape")
+        message = Parser().parsestr(headers)
+        return message
+
+    def read_pkg_info(path):
+        with open(path, "r",
+                  encoding="ascii",
+                  errors="surrogateescape") as headers:
+            message = Parser().parse(headers)
+        return message
+
+    def write_pkg_info(path, message):
+        with open(path, "wb") as out:
+            BytesGenerator(out, mangle_from_=False, maxheaderlen=0).flatten(message)
diff --git a/venv/lib/python3.7/site-packages/wheel/util.py b/venv/lib/python3.7/site-packages/wheel/util.py
new file mode 100644
index 00000000..3ae2b445
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/util.py
@@ -0,0 +1,46 @@
+import base64
+import io
+import sys
+
+
+if sys.version_info[0] < 3:
+    text_type = unicode  # noqa: F821
+
+    StringIO = io.BytesIO
+
+    def native(s, encoding='utf-8'):
+        if isinstance(s, unicode):  # noqa: F821
+            return s.encode(encoding)
+        return s
+else:
+    text_type = str
+
+    StringIO = io.StringIO
+
+    def native(s, encoding='utf-8'):
+        if isinstance(s, bytes):
+            return s.decode(encoding)
+        return s
+
+
+def urlsafe_b64encode(data):
+    """urlsafe_b64encode without padding"""
+    return base64.urlsafe_b64encode(data).rstrip(b'=')
+
+
+def urlsafe_b64decode(data):
+    """urlsafe_b64decode without padding"""
+    pad = b'=' * (4 - (len(data) & 3))
+    return base64.urlsafe_b64decode(data + pad)
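+
+# Doctest-style round trip without padding (illustrative bytes):
+#
+#     >>> urlsafe_b64encode(b"wheel")
+#     b'd2hlZWw'
+#     >>> urlsafe_b64decode(b"d2hlZWw")
+#     b'wheel'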
+
+
+def as_unicode(s):
+    if isinstance(s, bytes):
+        return s.decode('utf-8')
+    return s
+
+
+def as_bytes(s):
+    if isinstance(s, text_type):
+        return s.encode('utf-8')
+    return s
diff --git a/venv/lib/python3.7/site-packages/wheel/vendored/__init__.py b/venv/lib/python3.7/site-packages/wheel/vendored/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/wheel/vendored/packaging/__init__.py b/venv/lib/python3.7/site-packages/wheel/vendored/packaging/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/lib/python3.7/site-packages/wheel/vendored/packaging/_typing.py b/venv/lib/python3.7/site-packages/wheel/vendored/packaging/_typing.py
new file mode 100644
index 00000000..77a8b918
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/vendored/packaging/_typing.py
@@ -0,0 +1,48 @@
+"""For neatly implementing static typing in packaging.
+
+`mypy` - the static type analysis tool we use - uses the `typing` module, which
+provides core functionality fundamental to mypy's functioning.
+
+Generally, `typing` would be imported at runtime and used in that fashion -
+it acts as a no-op at runtime and does not have any run-time overhead by
+design.
+
+As it turns out, `typing` is not vendorable - it uses separate sources for
+Python 2/Python 3. Thus, this codebase can not expect it to be present.
+To work around this, mypy allows the typing import to be behind a False-y
+optional to prevent it from running at runtime and type-comments can be used
+to remove the need for the types to be accessible directly during runtime.
+
+This module provides the False-y guard in a nicely named fashion so that a
+curious maintainer can reach here to read this.
+
+In packaging, all static-typing related imports should be guarded as follows:
+
+    from packaging._typing import TYPE_CHECKING
+
+    if TYPE_CHECKING:
+        from typing import ...
+
+Ref: https://github.com/python/mypy/issues/3216
+"""
+
+__all__ = ["TYPE_CHECKING", "cast"]
+
+# The TYPE_CHECKING constant defined by the typing module is False at runtime
+# but True while type checking.
+if False:  # pragma: no cover
+    from typing import TYPE_CHECKING
+else:
+    TYPE_CHECKING = False
+
+# typing's cast syntax requires calling typing.cast at runtime, but we don't
+# want to import typing at runtime. Here, we inform the type checkers that
+# we're importing `typing.cast` as `cast` and re-implement typing.cast's
+# runtime behavior in a block that is ignored by type checkers.
+if TYPE_CHECKING:  # pragma: no cover
+    # not executed at runtime
+    from typing import cast
+else:
+    # executed at runtime
+    def cast(type_, value):  # noqa
+        return value
diff --git a/venv/lib/python3.7/site-packages/wheel/vendored/packaging/tags.py b/venv/lib/python3.7/site-packages/wheel/vendored/packaging/tags.py
new file mode 100644
index 00000000..3306c690
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/vendored/packaging/tags.py
@@ -0,0 +1,852 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import
+
+import distutils.util
+
+try:
+    from importlib.machinery import EXTENSION_SUFFIXES
+except ImportError:  # pragma: no cover
+    import imp
+
+    EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
+    del imp
+import collections
+import logging
+import os
+import platform
+import re
+import struct
+import sys
+import sysconfig
+import warnings
+
+from ._typing import TYPE_CHECKING, cast
+
+if TYPE_CHECKING:  # pragma: no cover
+    from typing import (
+        Dict,
+        FrozenSet,
+        IO,
+        Iterable,
+        Iterator,
+        List,
+        Optional,
+        Sequence,
+        Tuple,
+        Union,
+    )
+
+    PythonVersion = Sequence[int]
+    MacVersion = Tuple[int, int]
+    GlibcVersion = Tuple[int, int]
+
+
+logger = logging.getLogger(__name__)
+
+INTERPRETER_SHORT_NAMES = {
+    "python": "py",  # Generic.
+    "cpython": "cp",
+    "pypy": "pp",
+    "ironpython": "ip",
+    "jython": "jy",
+}  # type: Dict[str, str]
+
+
+_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
+
+
+_LEGACY_MANYLINUX_MAP = {
+    # CentOS 7 w/ glibc 2.17 (PEP 599)
+    (2, 17): "manylinux2014",
+    # CentOS 6 w/ glibc 2.12 (PEP 571)
+    (2, 12): "manylinux2010",
+    # CentOS 5 w/ glibc 2.5 (PEP 513)
+    (2, 5): "manylinux1",
+}
+
+# If glibc ever changes its major version, we need to know what the last
+# minor version was, so we can build the complete list of all versions.
+# For now, guess what the highest minor version might be, assume it will
+# be 50 for testing. Once this actually happens, update the dictionary
+# with the actual value.
+_LAST_GLIBC_MINOR = collections.defaultdict(lambda: 50)  # type: Dict[int, int]
+glibcVersion = collections.namedtuple("Version", ["major", "minor"])
+
+
+class Tag(object):
+    """
+    A representation of the tag triple for a wheel.
+
+    Instances are considered immutable and thus are hashable. Equality checking
+    is also supported.
+    """
+
+    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
+
+    def __init__(self, interpreter, abi, platform):
+        # type: (str, str, str) -> None
+        self._interpreter = interpreter.lower()
+        self._abi = abi.lower()
+        self._platform = platform.lower()
+        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
+        # that a set calls its `.isdisjoint()` method, which may be called hundreds of
+        # times when scanning a page of links for packages with tags matching that
+        # Set[Tag]. Pre-computing the value here produces significant speedups for
+        # downstream consumers.
+        self._hash = hash((self._interpreter, self._abi, self._platform))
+
+    @property
+    def interpreter(self):
+        # type: () -> str
+        return self._interpreter
+
+    @property
+    def abi(self):
+        # type: () -> str
+        return self._abi
+
+    @property
+    def platform(self):
+        # type: () -> str
+        return self._platform
+
+    def __eq__(self, other):
+        # type: (object) -> bool
+        if not isinstance(other, Tag):
+            return NotImplemented
+
+        return (
+            (self.platform == other.platform)
+            and (self.abi == other.abi)
+            and (self.interpreter == other.interpreter)
+        )
+
+    def __hash__(self):
+        # type: () -> int
+        return self._hash
+
+    def __str__(self):
+        # type: () -> str
+        return "{}-{}-{}".format(self._interpreter, self._abi, self._platform)
+
+    def __repr__(self):
+        # type: () -> str
+        return "<{self} @ {self_id}>".format(self=self, self_id=id(self))
+
+
+def parse_tag(tag):
+    # type: (str) -> FrozenSet[Tag]
+    """
+    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
+
+    Returning a set is required due to the possibility that the tag is a
+    compressed tag set.
+    """
+    tags = set()
+    interpreters, abis, platforms = tag.split("-")
+    for interpreter in interpreters.split("."):
+        for abi in abis.split("."):
+            for platform_ in platforms.split("."):
+                tags.add(Tag(interpreter, abi, platform_))
+    return frozenset(tags)
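+
+# Doctest-style sketch of expanding a compressed tag set (illustrative tag):
+#
+#     >>> sorted(str(t) for t in parse_tag("py2.py3-none-any"))
+#     ['py2-none-any', 'py3-none-any']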
+
+
+def _warn_keyword_parameter(func_name, kwargs):
+    # type: (str, Dict[str, bool]) -> bool
+    """
+    Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only.
+    """
+    if not kwargs:
+        return False
+    elif len(kwargs) > 1 or "warn" not in kwargs:
+        kwargs.pop("warn", None)
+        arg = next(iter(kwargs.keys()))
+        raise TypeError(
+            "{}() got an unexpected keyword argument {!r}".format(func_name, arg)
+        )
+    return kwargs["warn"]
+
+
+def _get_config_var(name, warn=False):
+    # type: (str, bool) -> Union[int, str, None]
+    value = sysconfig.get_config_var(name)
+    if value is None and warn:
+        logger.debug(
+            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+        )
+    return value
+
+
+def _normalize_string(string):
+    # type: (str) -> str
+    return string.replace(".", "_").replace("-", "_")
+
+
+def _abi3_applies(python_version):
+    # type: (PythonVersion) -> bool
+    """
+    Determine if the Python version supports abi3.
+
+    PEP 384 was first implemented in Python 3.2.
+    """
+    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+
+
+def _cpython_abis(py_version, warn=False):
+    # type: (PythonVersion, bool) -> List[str]
+    py_version = tuple(py_version)  # To allow for version comparison.
+    abis = []
+    version = _version_nodot(py_version[:2])
+    debug = pymalloc = ucs4 = ""
+    with_debug = _get_config_var("Py_DEBUG", warn)
+    has_refcount = hasattr(sys, "gettotalrefcount")
+    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+    # extension modules is the best option.
+    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+    if with_debug or (with_debug is None and (has_refcount or has_ext)):
+        debug = "d"
+    if py_version < (3, 8):
+        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+        if with_pymalloc or with_pymalloc is None:
+            pymalloc = "m"
+        if py_version < (3, 3):
+            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+            if unicode_size == 4 or (
+                unicode_size is None and sys.maxunicode == 0x10FFFF
+            ):
+                ucs4 = "u"
+    elif debug:
+        # Debug builds can also load "normal" extension modules.
+        # We can also assume no UCS-4 or pymalloc requirement.
+        abis.append("cp{version}".format(version=version))
+    abis.insert(
+        0,
+        "cp{version}{debug}{pymalloc}{ucs4}".format(
+            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
+        ),
+    )
+    return abis
+
+
+def cpython_tags(
+    python_version=None,  # type: Optional[PythonVersion]
+    abis=None,  # type: Optional[Iterable[str]]
+    platforms=None,  # type: Optional[Iterable[str]]
+    **kwargs  # type: bool
+):
+    # type: (...) -> Iterator[Tag]
+    """
+    Yields the tags for a CPython interpreter.
+
+    The tags consist of:
+    - cp<python_version>-<abi>-<platform>
+    - cp<python_version>-abi3-<platform>
+    - cp<python_version>-none-<platform>
+    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
+
+    If python_version only specifies a major version then user-provided ABIs and
+    the 'none' ABI tag will be used.
+
+    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+    their normal position and not at the beginning.
+    """
+    warn = _warn_keyword_parameter("cpython_tags", kwargs)
+    if not python_version:
+        python_version = sys.version_info[:2]
+
+    interpreter = "cp{}".format(_version_nodot(python_version[:2]))
+
+    if abis is None:
+        if len(python_version) > 1:
+            abis = _cpython_abis(python_version, warn)
+        else:
+            abis = []
+    abis = list(abis)
+    # 'abi3' and 'none' are explicitly handled later.
+    for explicit_abi in ("abi3", "none"):
+        try:
+            abis.remove(explicit_abi)
+        except ValueError:
+            pass
+
+    platforms = list(platforms or _platform_tags())
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+    if _abi3_applies(python_version):
+        for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
+            yield tag
+    for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms):
+        yield tag
+
+    if _abi3_applies(python_version):
+        for minor_version in range(python_version[1] - 1, 1, -1):
+            for platform_ in platforms:
+                interpreter = "cp{version}".format(
+                    version=_version_nodot((python_version[0], minor_version))
+                )
+                yield Tag(interpreter, "abi3", platform_)
+
+
+def _generic_abi():
+    # type: () -> Iterator[str]
+    abi = sysconfig.get_config_var("SOABI")
+    if abi:
+        yield _normalize_string(abi)
+
+
+def generic_tags(
+    interpreter=None,  # type: Optional[str]
+    abis=None,  # type: Optional[Iterable[str]]
+    platforms=None,  # type: Optional[Iterable[str]]
+    **kwargs  # type: bool
+):
+    # type: (...) -> Iterator[Tag]
+    """
+    Yields the tags for a generic interpreter.
+
+    The tags consist of:
+    - <interpreter>-<abi>-<platform>
+
+    The "none" ABI will be added if it was not explicitly provided.
+    """
+    warn = _warn_keyword_parameter("generic_tags", kwargs)
+    if not interpreter:
+        interp_name = interpreter_name()
+        interp_version = interpreter_version(warn=warn)
+        interpreter = "".join([interp_name, interp_version])
+    if abis is None:
+        abis = _generic_abi()
+    platforms = list(platforms or _platform_tags())
+    abis = list(abis)
+    if "none" not in abis:
+        abis.append("none")
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version):
+    # type: (PythonVersion) -> Iterator[str]
+    """
+    Yields Python versions in descending order.
+
+    After the latest version, the major-only version will be yielded, and then
+    all previous versions of that major version.
+    """
+    if len(py_version) > 1:
+        yield "py{version}".format(version=_version_nodot(py_version[:2]))
+    yield "py{major}".format(major=py_version[0])
+    if len(py_version) > 1:
+        for minor in range(py_version[1] - 1, -1, -1):
+            yield "py{version}".format(version=_version_nodot((py_version[0], minor)))
+
+
+def compatible_tags(
+    python_version=None,  # type: Optional[PythonVersion]
+    interpreter=None,  # type: Optional[str]
+    platforms=None,  # type: Optional[Iterable[str]]
+):
+    # type: (...) -> Iterator[Tag]
+    """
+    Yields the sequence of tags that are compatible with a specific version of Python.
+
+    The tags consist of:
+    - py*-none-<platform>
+    - <interpreter>-none-any  # ... if `interpreter` is provided.
+    - py*-none-any
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+    platforms = list(platforms or _platform_tags())
+    for version in _py_interpreter_range(python_version):
+        for platform_ in platforms:
+            yield Tag(version, "none", platform_)
+    if interpreter:
+        yield Tag(interpreter, "none", "any")
+    for version in _py_interpreter_range(python_version):
+        yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
+    # type: (str, bool) -> str
+    if not is_32bit:
+        return arch
+
+    if arch.startswith("ppc"):
+        return "ppc"
+
+    return "i386"
+
+
+def _mac_binary_formats(version, cpu_arch):
+    # type: (MacVersion, str) -> List[str]
+    formats = [cpu_arch]
+    if cpu_arch == "x86_64":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat64", "fat32"])
+
+    elif cpu_arch == "i386":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat32", "fat"])
+
+    elif cpu_arch == "ppc64":
+        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+        if version > (10, 5) or version < (10, 4):
+            return []
+        formats.append("fat64")
+
+    elif cpu_arch == "ppc":
+        if version > (10, 6):
+            return []
+        formats.extend(["fat32", "fat"])
+
+    if cpu_arch in {"arm64", "x86_64"}:
+        formats.append("universal2")
+
+    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+        formats.append("universal")
+
+    return formats
+
+
+def mac_platforms(version=None, arch=None):
+    # type: (Optional[MacVersion], Optional[str]) -> Iterator[str]
+    """
+    Yields the platform tags for a macOS system.
+
+    The `version` parameter is a two-item tuple specifying the macOS version to
+    generate platform tags for. The `arch` parameter is the CPU architecture to
+    generate platform tags for. Both parameters default to the appropriate value
+    for the current system.
+    """
+    version_str, _, cpu_arch = platform.mac_ver()  # type: ignore
+    if version is None:
+        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+    if arch is None:
+        arch = _mac_arch(cpu_arch)
+
+    if (10, 0) <= version < (11, 0):
+        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+        # "minor" version number.  The major version was always 10.
+        for minor_version in range(version[1], -1, -1):
+            compat_version = 10, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=10, minor=minor_version, binary_format=binary_format
+                )
+
+    if version >= (11, 0):
+        # Starting with Mac OS 11, each yearly release bumps the major version
+        # number. The minor versions are now the midyear updates.
+        for major_version in range(version[0], 10, -1):
+            compat_version = major_version, 0
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=major_version, minor=0, binary_format=binary_format
+                )
+
+    if version >= (11, 0) and arch == "x86_64":
+        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+        # releases exist.
+        for minor_version in range(16, 3, -1):
+            compat_version = 10, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=compat_version[0],
+                    minor=compat_version[1],
+                    binary_format=binary_format,
+                )
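+
+# Doctest-style sketch with pinned inputs (illustrative version and arch):
+#
+#     >>> list(mac_platforms((10, 15), "x86_64"))[:3]
+#     ['macosx_10_15_x86_64', 'macosx_10_15_intel', 'macosx_10_15_fat64']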
+
+
+# From PEP 513, PEP 600
+def _is_manylinux_compatible(name, arch, glibc_version):
+    # type: (str, str, GlibcVersion) -> bool
+    sys_glibc = _get_glibc_version()
+    if sys_glibc < glibc_version:
+        return False
+    # Check for presence of _manylinux module.
+    try:
+        import _manylinux  # noqa
+    except ImportError:
+        pass
+    else:
+        if hasattr(_manylinux, "manylinux_compatible"):
+            result = _manylinux.manylinux_compatible(
+                glibc_version[0], glibc_version[1], arch
+            )
+            if result is not None:
+                return bool(result)
+        else:
+            if glibc_version == (2, 5):
+                if hasattr(_manylinux, "manylinux1_compatible"):
+                    return bool(_manylinux.manylinux1_compatible)
+            if glibc_version == (2, 12):
+                if hasattr(_manylinux, "manylinux2010_compatible"):
+                    return bool(_manylinux.manylinux2010_compatible)
+            if glibc_version == (2, 17):
+                if hasattr(_manylinux, "manylinux2014_compatible"):
+                    return bool(_manylinux.manylinux2014_compatible)
+    return True
+
+
+def _glibc_version_string():
+    # type: () -> Optional[str]
+    # Returns glibc version string, or None if not using glibc.
+    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _glibc_version_string_confstr():
+    # type: () -> Optional[str]
+    """
+    Primary implementation of glibc_version_string using os.confstr.
+    """
+    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+    # to be broken or missing. This strategy is used in the standard library
+    # platform module.
+    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
+    try:
+        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
+        version_string = os.confstr(  # type: ignore[attr-defined] # noqa: F821
+            "CS_GNU_LIBC_VERSION"
+        )
+        assert version_string is not None
+        _, version = version_string.split()  # type: Tuple[str, str]
+    except (AssertionError, AttributeError, OSError, ValueError):
+        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+        return None
+    return version
+
+
+def _glibc_version_string_ctypes():
+    # type: () -> Optional[str]
+    """
+    Fallback implementation of glibc_version_string using ctypes.
+    """
+    try:
+        import ctypes
+    except ImportError:
+        return None
+
+    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+    # manpage says, "If filename is NULL, then the returned handle is for the
+    # main program". This way we can let the linker do the work to figure out
+    # which libc our process is actually using.
+    #
+    # We must also handle the special case where the executable is not a
+    # dynamically linked executable. This can occur when using musl libc,
+    # for example. In this situation, dlopen() will error, leading to an
+    # OSError. Interestingly, at least in the case of musl, there is no
+    # errno set on the OSError. The single string argument used to construct
+    # OSError comes from libc itself and is therefore not portable to
+    # hard code here. In any case, failure to call dlopen() means we
+    # can't proceed, so we bail on our attempt.
+    try:
+        # Note: typeshed is wrong here so we are ignoring this line.
+        process_namespace = ctypes.CDLL(None)  # type: ignore
+    except OSError:
+        return None
+
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str = gnu_get_libc_version()  # type: str
+    # py2 / py3 compatibility:
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+def _parse_glibc_version(version_str):
+    # type: (str) -> Tuple[int, int]
+    # Parse glibc version.
+    #
+    # We use a regexp instead of str.split because we want to discard any
+    # random junk that might come after the minor version -- this might happen
+    # in patched/forked versions of glibc (e.g. Linaro's version of glibc
+    # uses version strings like "2.20-2014.11"). See gh-3588.
+    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+    if not m:
+        warnings.warn(
+            "Expected glibc version with 2 components major.minor,"
+            " got: %s" % version_str,
+            RuntimeWarning,
+        )
+        return -1, -1
+    return (int(m.group("major")), int(m.group("minor")))
+
+
+_glibc_version = []  # type: List[Tuple[int, int]]
+
+
+def _get_glibc_version():
+    # type: () -> Tuple[int, int]
+    if _glibc_version:
+        return _glibc_version[0]
+    version_str = _glibc_version_string()
+    if version_str is None:
+        _glibc_version.append((-1, -1))
+    else:
+        _glibc_version.append(_parse_glibc_version(version_str))
+    return _glibc_version[0]
+
+
+# Python does not provide platform information at sufficient granularity to
+# identify the architecture of the running executable in some cases, so we
+# determine it dynamically by reading the information from the running
+# process. This only applies on Linux, which uses the ELF format.
+class _ELFFileHeader(object):
+    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
+    class _InvalidELFFileHeader(ValueError):
+        """
+        An invalid ELF file header was found.
+        """
+
+    ELF_MAGIC_NUMBER = 0x7F454C46
+    ELFCLASS32 = 1
+    ELFCLASS64 = 2
+    ELFDATA2LSB = 1
+    ELFDATA2MSB = 2
+    EM_386 = 3
+    EM_S390 = 22
+    EM_ARM = 40
+    EM_X86_64 = 62
+    EF_ARM_ABIMASK = 0xFF000000
+    EF_ARM_ABI_VER5 = 0x05000000
+    EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+    def __init__(self, file):
+        # type: (IO[bytes]) -> None
+        def unpack(fmt):
+            # type: (str) -> int
+            try:
+                (result,) = struct.unpack(
+                    fmt, file.read(struct.calcsize(fmt))
+                )  # type: (int, )
+            except struct.error:
+                raise _ELFFileHeader._InvalidELFFileHeader()
+            return result
+
+        self.e_ident_magic = unpack(">I")
+        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
+            raise _ELFFileHeader._InvalidELFFileHeader()
+        self.e_ident_class = unpack("B")
+        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
+            raise _ELFFileHeader._InvalidELFFileHeader()
+        self.e_ident_data = unpack("B")
+        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
+            raise _ELFFileHeader._InvalidELFFileHeader()
+        self.e_ident_version = unpack("B")
+        self.e_ident_osabi = unpack("B")
+        self.e_ident_abiversion = unpack("B")
+        self.e_ident_pad = file.read(7)
+        format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
+        format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
+        format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
+        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
+        self.e_type = unpack(format_h)
+        self.e_machine = unpack(format_h)
+        self.e_version = unpack(format_i)
+        self.e_entry = unpack(format_p)
+        self.e_phoff = unpack(format_p)
+        self.e_shoff = unpack(format_p)
+        self.e_flags = unpack(format_i)
+        self.e_ehsize = unpack(format_h)
+        self.e_phentsize = unpack(format_h)
+        self.e_phnum = unpack(format_h)
+        self.e_shentsize = unpack(format_h)
+        self.e_shnum = unpack(format_h)
+        self.e_shstrndx = unpack(format_h)
+
+
+def _get_elf_header():
+    # type: () -> Optional[_ELFFileHeader]
+    try:
+        with open(sys.executable, "rb") as f:
+            elf_header = _ELFFileHeader(f)
+    except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
+        return None
+    return elf_header
+
+
+def _is_linux_armhf():
+    # type: () -> bool
+    # hard-float ABI can be detected from the ELF header of the running
+    # process
+    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+    elf_header = _get_elf_header()
+    if elf_header is None:
+        return False
+    result = elf_header.e_ident_class == elf_header.ELFCLASS32
+    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+    result &= elf_header.e_machine == elf_header.EM_ARM
+    result &= (
+        elf_header.e_flags & elf_header.EF_ARM_ABIMASK
+    ) == elf_header.EF_ARM_ABI_VER5
+    result &= (
+        elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
+    ) == elf_header.EF_ARM_ABI_FLOAT_HARD
+    return result
+
+
+def _is_linux_i686():
+    # type: () -> bool
+    elf_header = _get_elf_header()
+    if elf_header is None:
+        return False
+    result = elf_header.e_ident_class == elf_header.ELFCLASS32
+    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+    result &= elf_header.e_machine == elf_header.EM_386
+    return result
+
+
+def _have_compatible_manylinux_abi(arch):
+    # type: (str) -> bool
+    if arch == "armv7l":
+        return _is_linux_armhf()
+    if arch == "i686":
+        return _is_linux_i686()
+    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+
+
+def _manylinux_tags(linux, arch):
+    # type: (str, str) -> Iterator[str]
+    # Oldest glibc to be supported regardless of architecture is (2, 17).
+    too_old_glibc2 = glibcVersion(2, 16)
+    if arch in {"x86_64", "i686"}:
+        # On x86/i686 also oldest glibc to be supported is (2, 5).
+        too_old_glibc2 = glibcVersion(2, 4)
+    current_glibc = glibcVersion(*_get_glibc_version())
+    glibc_max_list = [current_glibc]
+    # We can assume compatibility across glibc major versions.
+    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+    #
+    # Build a list of maximum glibc versions so that we can
+    # output the canonical list of all glibc from current_glibc
+    # down to too_old_glibc2, including all intermediary versions.
+    for glibc_major in range(current_glibc.major - 1, 1, -1):
+        glibc_max_list.append(glibcVersion(glibc_major, _LAST_GLIBC_MINOR[glibc_major]))
+    for glibc_max in glibc_max_list:
+        if glibc_max.major == too_old_glibc2.major:
+            min_minor = too_old_glibc2.minor
+        else:
+            # For other glibc major versions oldest supported is (x, 0).
+            min_minor = -1
+        for glibc_minor in range(glibc_max.minor, min_minor, -1):
+            glibc_version = (glibc_max.major, glibc_minor)
+            tag = "manylinux_{}_{}".format(*glibc_version)
+            if _is_manylinux_compatible(tag, arch, glibc_version):
+                yield linux.replace("linux", tag)
+            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+            if glibc_version in _LEGACY_MANYLINUX_MAP:
+                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+                if _is_manylinux_compatible(legacy_tag, arch, glibc_version):
+                    yield linux.replace("linux", legacy_tag)
+
+
+def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
+    # type: (bool) -> Iterator[str]
+    linux = _normalize_string(distutils.util.get_platform())
+    if is_32bit:
+        if linux == "linux_x86_64":
+            linux = "linux_i686"
+        elif linux == "linux_aarch64":
+            linux = "linux_armv7l"
+    _, arch = linux.split("_", 1)
+    if _have_compatible_manylinux_abi(arch):
+        for tag in _manylinux_tags(linux, arch):
+            yield tag
+    yield linux
+
+
+def _generic_platforms():
+    # type: () -> Iterator[str]
+    yield _normalize_string(distutils.util.get_platform())
+
+
+def _platform_tags():
+    # type: () -> Iterator[str]
+    """
+    Provides the platform tags for this installation.
+    """
+    if platform.system() == "Darwin":
+        return mac_platforms()
+    elif platform.system() == "Linux":
+        return _linux_platforms()
+    else:
+        return _generic_platforms()
+
+
+def interpreter_name():
+    # type: () -> str
+    """
+    Returns the name of the running interpreter.
+    """
+    try:
+        name = sys.implementation.name  # type: ignore
+    except AttributeError:  # pragma: no cover
+        # Python 2.7 compatibility.
+        name = platform.python_implementation().lower()
+    return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(**kwargs):
+    # type: (bool) -> str
+    """
+    Returns the version of the running interpreter.
+    """
+    warn = _warn_keyword_parameter("interpreter_version", kwargs)
+    version = _get_config_var("py_version_nodot", warn=warn)
+    if version:
+        version = str(version)
+    else:
+        version = _version_nodot(sys.version_info[:2])
+    return version
+
+
+def _version_nodot(version):
+    # type: (PythonVersion) -> str
+    return "".join(map(str, version))
+
+
+def sys_tags(**kwargs):
+    # type: (bool) -> Iterator[Tag]
+    """
+    Returns the sequence of tag triples for the running interpreter.
+
+    The order of the sequence corresponds to priority order for the
+    interpreter, from most to least important.
+    """
+    warn = _warn_keyword_parameter("sys_tags", kwargs)
+
+    interp_name = interpreter_name()
+    if interp_name == "cp":
+        for tag in cpython_tags(warn=warn):
+            yield tag
+    else:
+        for tag in generic_tags():
+            yield tag
+
+    for tag in compatible_tags():
+        yield tag
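+
+
+if __name__ == "__main__":
+    # Minimal smoke-test sketch: print the three highest-priority tags for
+    # the running interpreter (output varies by platform and version).
+    import itertools
+
+    for tag in itertools.islice(sys_tags(), 3):
+        print(tag)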
diff --git a/venv/lib/python3.7/site-packages/wheel/wheelfile.py b/venv/lib/python3.7/site-packages/wheel/wheelfile.py
new file mode 100644
index 00000000..3ee97ddd
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wheel/wheelfile.py
@@ -0,0 +1,169 @@
+from __future__ import print_function
+
+import csv
+import hashlib
+import os.path
+import re
+import stat
+import time
+from collections import OrderedDict
+from distutils import log as logger
+from zipfile import ZIP_DEFLATED, ZipInfo, ZipFile
+
+from wheel.cli import WheelError
+from wheel.util import urlsafe_b64decode, as_unicode, native, urlsafe_b64encode, as_bytes, StringIO
+
+# Non-greedy matching of an optional build number may be too clever (more
+# invalid wheel filenames will match). Separate regex for .dist-info?
+WHEEL_INFO_RE = re.compile(
+    r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.+?))(-(?P<build>\d[^-]*))?
+     -(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)\.whl$""",
+    re.VERBOSE)
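+
+# Doctest-style sketch of the filename grammar (illustrative wheel name):
+#
+#     >>> m = WHEEL_INFO_RE.match("pip-21.1.1-py3-none-any.whl")
+#     >>> m.group("name"), m.group("ver"), m.group("pyver")
+#     ('pip', '21.1.1', 'py3')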
+
+
+def get_zipinfo_datetime(timestamp=None):
+    # Some applications need reproducible .whl files, but they can't do this without forcing
+    # the timestamp of the individual ZipInfo objects. See issue #143.
+    timestamp = int(os.environ.get('SOURCE_DATE_EPOCH', timestamp or time.time()))
+    return time.gmtime(timestamp)[0:6]
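+
+# Doctest-style sketch, assuming SOURCE_DATE_EPOCH is unset
+# (1577836800 is 2020-01-01T00:00:00Z, an illustrative timestamp):
+#
+#     >>> get_zipinfo_datetime(1577836800)
+#     (2020, 1, 1, 0, 0, 0)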
+
+
+class WheelFile(ZipFile):
+    """A ZipFile derivative class that also reads SHA-256 hashes from
+    .dist-info/RECORD and checks any read files against those.
+    """
+
+    _default_algorithm = hashlib.sha256
+
+    def __init__(self, file, mode='r', compression=ZIP_DEFLATED):
+        basename = os.path.basename(file)
+        self.parsed_filename = WHEEL_INFO_RE.match(basename)
+        if not basename.endswith('.whl') or self.parsed_filename is None:
+            raise WheelError("Bad wheel filename {!r}".format(basename))
+
+        ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)
+
+        self.dist_info_path = '{}.dist-info'.format(self.parsed_filename.group('namever'))
+        self.record_path = self.dist_info_path + '/RECORD'
+        self._file_hashes = OrderedDict()
+        self._file_sizes = {}
+        if mode == 'r':
+            # Ignore RECORD and any embedded wheel signatures
+            self._file_hashes[self.record_path] = None, None
+            self._file_hashes[self.record_path + '.jws'] = None, None
+            self._file_hashes[self.record_path + '.p7s'] = None, None
+
+            # Fill in the expected hashes by reading them from RECORD
+            try:
+                record = self.open(self.record_path)
+            except KeyError:
+                raise WheelError('Missing {} file'.format(self.record_path))
+
+            with record:
+                for line in record:
+                    line = line.decode('utf-8')
+                    path, hash_sum, size = line.rsplit(u',', 2)
+                    if hash_sum:
+                        algorithm, hash_sum = hash_sum.split(u'=')
+                        try:
+                            hashlib.new(algorithm)
+                        except ValueError:
+                            raise WheelError('Unsupported hash algorithm: {}'.format(algorithm))
+
+                        if algorithm.lower() in {'md5', 'sha1'}:
+                            raise WheelError(
+                                'Weak hash algorithm ({}) is not permitted by PEP 427'
+                                .format(algorithm))
+
+                        self._file_hashes[path] = (
+                            algorithm, urlsafe_b64decode(hash_sum.encode('ascii')))
+
+    def open(self, name_or_info, mode="r", pwd=None):
+        def _update_crc(newdata, eof=None):
+            if eof is None:
+                eof = ef._eof
+                update_crc_orig(newdata)
+            else:  # Python 2
+                update_crc_orig(newdata, eof)
+
+            running_hash.update(newdata)
+            if eof and running_hash.digest() != expected_hash:
+                raise WheelError("Hash mismatch for file '{}'".format(native(ef_name)))
+
+        ef_name = as_unicode(name_or_info.filename if isinstance(name_or_info, ZipInfo)
+                             else name_or_info)
+        if mode == 'r' and not ef_name.endswith('/') and ef_name not in self._file_hashes:
+            raise WheelError("No hash found for file '{}'".format(native(ef_name)))
+
+        ef = ZipFile.open(self, name_or_info, mode, pwd)
+        if mode == 'r' and not ef_name.endswith('/'):
+            algorithm, expected_hash = self._file_hashes[ef_name]
+            if expected_hash is not None:
+                # Monkey patch the _update_crc method to also check for the hash from RECORD
+                running_hash = hashlib.new(algorithm)
+                update_crc_orig, ef._update_crc = ef._update_crc, _update_crc
+
+        return ef
+
+    def write_files(self, base_dir):
+        logger.info("creating '%s' and adding '%s' to it", self.filename, base_dir)
+        deferred = []
+        for root, dirnames, filenames in os.walk(base_dir):
+            # Sort the directory names so that `os.walk` will walk them in a
+            # defined order on the next iteration.
+            dirnames.sort()
+            for name in sorted(filenames):
+                path = os.path.normpath(os.path.join(root, name))
+                if os.path.isfile(path):
+                    arcname = os.path.relpath(path, base_dir).replace(os.path.sep, '/')
+                    if arcname == self.record_path:
+                        pass
+                    elif root.endswith('.dist-info'):
+                        deferred.append((path, arcname))
+                    else:
+                        self.write(path, arcname)
+
+        deferred.sort()
+        for path, arcname in deferred:
+            self.write(path, arcname)
+
+    def write(self, filename, arcname=None, compress_type=None):
+        with open(filename, 'rb') as f:
+            st = os.fstat(f.fileno())
+            data = f.read()
+
+        zinfo = ZipInfo(arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime))
+        zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
+        zinfo.compress_type = compress_type or self.compression
+        self.writestr(zinfo, data, compress_type)
+
+    def writestr(self, zinfo_or_arcname, bytes, compress_type=None):
+        ZipFile.writestr(self, zinfo_or_arcname, bytes, compress_type)
+        fname = (zinfo_or_arcname.filename if isinstance(zinfo_or_arcname, ZipInfo)
+                 else zinfo_or_arcname)
+        logger.info("adding '%s'", fname)
+        if fname != self.record_path:
+            hash_ = self._default_algorithm(bytes)
+            self._file_hashes[fname] = hash_.name, native(urlsafe_b64encode(hash_.digest()))
+            self._file_sizes[fname] = len(bytes)
+
+    def close(self):
+        # Write RECORD
+        if self.fp is not None and self.mode == 'w' and self._file_hashes:
+            data = StringIO()
+            writer = csv.writer(data, delimiter=',', quotechar='"', lineterminator='\n')
+            writer.writerows((
+                (
+                    fname,
+                    algorithm + "=" + hash_,
+                    self._file_sizes[fname]
+                )
+                for fname, (algorithm, hash_) in self._file_hashes.items()
+            ))
+            writer.writerow((format(self.record_path), "", ""))
+            zinfo = ZipInfo(native(self.record_path), date_time=get_zipinfo_datetime())
+            zinfo.compress_type = self.compression
+            zinfo.external_attr = 0o664 << 16
+            self.writestr(zinfo, as_bytes(data.getvalue()))
+
+        ZipFile.close(self)
diff --git a/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/PKG-INFO b/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/PKG-INFO
new file mode 100644
index 00000000..4bfdc76d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/PKG-INFO
@@ -0,0 +1,167 @@
+Metadata-Version: 2.1
+Name: wrapt
+Version: 1.12.1
+Summary: Module for decorators, wrappers and monkey patching.
+Home-page: https://github.com/GrahamDumpleton/wrapt
+Author: Graham Dumpleton
+Author-email: Graham.Dumpleton@gmail.com
+License: BSD
+Description: wrapt
+        =====
+        
+        |Travis| |AppVeyor| |Coveralls| |PyPI|
+        
+        The aim of the **wrapt** module is to provide a transparent object proxy
+        for Python, which can be used as the basis for the construction of function
+        wrappers and decorator functions.
+        
+        The **wrapt** module focuses very much on correctness. It therefore goes
+        way beyond existing mechanisms such as ``functools.wraps()`` to ensure that
+        decorators preserve introspectability, signatures, type checking abilities
+        etc. The decorators that can be constructed using this module will work in
+        far more scenarios than typical decorators and provide more predictable and
+        consistent behaviour.
+        
+        To ensure that the overhead is as minimal as possible, a C extension module
+        is used for performance critical components. An automatic fallback to a
+        pure Python implementation is also provided where a target system does not
+        have a compiler to allow the C extension to be compiled.
+        
+        Documentation
+        -------------
+        
+        For further information on the **wrapt** module see:
+        
+        * http://wrapt.readthedocs.org/
+        
+        Quick Start
+        -----------
+        
+        To implement your decorator you need to first define a wrapper function.
+        This will be called each time a decorated function is called. The wrapper
+        function needs to take four positional arguments:
+        
+        * ``wrapped`` - The wrapped function, which in turn needs to be called by your wrapper function.
+        * ``instance`` - The object to which the wrapped function was bound when it was called.
+        * ``args`` - The list of positional arguments supplied when the decorated function was called.
+        * ``kwargs`` - The dictionary of keyword arguments supplied when the decorated function was called.
+        
+        The wrapper function would do whatever it needs to, but would usually in
+        turn call the wrapped function that is passed in via the ``wrapped``
+        argument.
+        
+        The decorator ``@wrapt.decorator`` then needs to be applied to the wrapper
+        function to convert it into a decorator which can in turn be applied to
+        other functions.
+        
+        ::
+        
+            import wrapt
+            
+            @wrapt.decorator
+            def pass_through(wrapped, instance, args, kwargs):
+                return wrapped(*args, **kwargs)
+        
+            @pass_through
+            def function():
+                pass
+        
+        If you wish to implement a decorator which accepts arguments, then wrap the
+        definition of the decorator in a function closure. Any arguments supplied
+        to the outer function when the decorator is applied will be available to
+        the inner wrapper when the wrapped function is called.
+        
+        ::
+        
+            import wrapt
+        
+            def with_arguments(myarg1, myarg2):
+                @wrapt.decorator
+                def wrapper(wrapped, instance, args, kwargs):
+                    return wrapped(*args, **kwargs)
+                return wrapper
+        
+            @with_arguments(1, 2)
+            def function():
+                pass
+        
+        When applied to a normal function or static method, the wrapper function
+        when called will be passed ``None`` as the ``instance`` argument.
+        
+        When applied to an instance method, the wrapper function when called will
+        be passed the instance of the class the method is being called on as the
+        ``instance`` argument. This will be the case even when the instance method
+        was called explicitly via the class and the instance passed as the first
+        argument. That is, the instance will never be passed as part of ``args``.
+        
+        When applied to a class method, the wrapper function when called will be
+        passed the class type as the ``instance`` argument.
+        
+        When applied to a class, the wrapper function when called will be passed
+        ``None`` as the ``instance`` argument. The ``wrapped`` argument in this
+        case will be the class.
+        
+        The above rules can be summarised with the following example.
+        
+        ::
+        
+            import inspect
+            import wrapt
+            
+            @wrapt.decorator
+            def universal(wrapped, instance, args, kwargs):
+                if instance is None:
+                    if inspect.isclass(wrapped):
+                        # Decorator was applied to a class.
+                        return wrapped(*args, **kwargs)
+                    else:
+                        # Decorator was applied to a function or staticmethod.
+                        return wrapped(*args, **kwargs)
+                else:
+                    if inspect.isclass(instance):
+                        # Decorator was applied to a classmethod.
+                        return wrapped(*args, **kwargs)
+                    else:
+                        # Decorator was applied to an instancemethod.
+                        return wrapped(*args, **kwargs)
+        
+        Using these checks it is therefore possible to create a universal decorator
+        that can be applied in all situations. It is no longer necessary to create
+        different variants of decorators for normal functions and instance methods,
+        or use additional wrappers to convert a function decorator into one that
+        will work for instance methods.
+        
+        In all cases, the wrapped function passed to the wrapper function is called
+        in the same way, with ``args`` and ``kwargs`` being passed. The
+        ``instance`` argument doesn't need to be used in calling the wrapped
+        function.
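+        
+        As an illustrative sketch (the names here are invented for the
+        example), a wrapper can inspect ``instance`` purely for reporting
+        while leaving the call itself unchanged:
+        
+        ::
+        
+            import wrapt
+            
+            @wrapt.decorator
+            def note_binding(wrapped, instance, args, kwargs):
+                # Report what the wrapped function was bound to, then
+                # call it as normal.
+                print('calling %s bound to %r' % (wrapped.__name__, instance))
+                return wrapped(*args, **kwargs)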
+        
+        Repository
+        ----------
+        
+        Full source code for the **wrapt** module, including documentation files
+        and unit tests, can be obtained from github.
+        
+        * https://github.com/GrahamDumpleton/wrapt
+        
+        .. |Travis| image:: https://travis-ci.org/GrahamDumpleton/wrapt.svg?branch=develop
+           :target: https://travis-ci.org/GrahamDumpleton/wrapt
+        .. |AppVeyor| image:: https://ci.appveyor.com/api/projects/status/32r7s2skrgm9ubva?svg=true
+           :target: https://ci.appveyor.com/project/GrahamDumpleton/wrapt/branch/develop
+        .. |Coveralls| image:: https://img.shields.io/coveralls/GrahamDumpleton/wrapt/develop.svg
+           :target: https://coveralls.io/github/GrahamDumpleton/wrapt?branch=develop
+        .. |PyPI| image:: https://img.shields.io/pypi/v/wrapt.svg
+           :target: https://pypi.python.org/pypi/wrapt
+        
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
diff --git a/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/SOURCES.txt b/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/SOURCES.txt
new file mode 100644
index 00000000..2c5e9d24
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/SOURCES.txt
@@ -0,0 +1,12 @@
+LICENSE
+README.rst
+setup.py
+src/wrapt/__init__.py
+src/wrapt/_wrappers.c
+src/wrapt/decorators.py
+src/wrapt/importer.py
+src/wrapt/wrappers.py
+src/wrapt.egg-info/PKG-INFO
+src/wrapt.egg-info/SOURCES.txt
+src/wrapt.egg-info/dependency_links.txt
+src/wrapt.egg-info/top_level.txt
\ No newline at end of file
diff --git a/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/dependency_links.txt b/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/dependency_links.txt
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/installed-files.txt b/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/installed-files.txt
new file mode 100644
index 00000000..20386bf5
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/installed-files.txt
@@ -0,0 +1,13 @@
+../wrapt/__init__.py
+../wrapt/__pycache__/__init__.cpython-37.pyc
+../wrapt/__pycache__/decorators.cpython-37.pyc
+../wrapt/__pycache__/importer.cpython-37.pyc
+../wrapt/__pycache__/wrappers.cpython-37.pyc
+../wrapt/_wrappers.cpython-37m-darwin.so
+../wrapt/decorators.py
+../wrapt/importer.py
+../wrapt/wrappers.py
+PKG-INFO
+SOURCES.txt
+dependency_links.txt
+top_level.txt
diff --git a/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/top_level.txt b/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/top_level.txt
new file mode 100644
index 00000000..ba11553a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wrapt-1.12.1-py3.7.egg-info/top_level.txt
@@ -0,0 +1 @@
+wrapt
diff --git a/venv/lib/python3.7/site-packages/wrapt/__init__.py b/venv/lib/python3.7/site-packages/wrapt/__init__.py
new file mode 100644
index 00000000..7be739bf
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wrapt/__init__.py
@@ -0,0 +1,16 @@
+__version_info__ = ('1', '12', '1')
+__version__ = '.'.join(__version_info__)
+
+from .wrappers import (ObjectProxy, CallableObjectProxy, FunctionWrapper,
+        BoundFunctionWrapper, WeakFunctionProxy, PartialCallableObjectProxy,
+        resolve_path, apply_patch, wrap_object, wrap_object_attribute,
+        function_wrapper, wrap_function_wrapper, patch_function_wrapper,
+        transient_function_wrapper)
+
+from .decorators import (adapter_factory, AdapterFactory, decorator,
+        synchronized)
+
+from .importer import (register_post_import_hook, when_imported,
+        notify_module_loaded, discover_post_import_hooks)
+
+from inspect import getcallargs
diff --git a/venv/lib/python3.7/site-packages/wrapt/_wrappers.cpython-37m-darwin.so b/venv/lib/python3.7/site-packages/wrapt/_wrappers.cpython-37m-darwin.so
new file mode 100755
index 0000000000000000000000000000000000000000..14ca26dc4b80b68e2ce0a43ea6469f5e334a0355
GIT binary patch
literal 74040
zcmX^A>+L^w1_nlE1_lNW1_lNm1_lNNRtAPv1_+R3U|_hx$iNUEAL1Gj0ue(Y8&G%*
z3{XBR0|NsLgH*)Fm!wvdfLIWWZr%fDi1<cB2o2)1K$sAc1!gjY8y{a>Qkql@;V@vj
zZ&E2l>JuY`2JxXxumA_tJT@>7q#tBnd`4<cL23~e^IFOw3Ijj^#lXM-;xj>nASBd0
z2nS?da&bvfW?ni}0NuY=svr_FP*1}65K#sO6tltZDa|X-%u9*S%uCA$^HJmDLmfmX
z57d(&KDrqY6%Y~;uP%-uju0_035`Dn2QUvFpD;ei`ydR8Tab$Q_`IC>;>z5l{G9lL
zk|Kuqc<knZ(iTWPC>BBb!E6Qw22l71!#H4nrRO6h6Abe>pzce6>H+c5%|lM6@$or{
z_}rHPHE#wqut0od^Vk^}7+`jRtc;IONi0bOu^<@3zs(SF4j~B5(9FO9jbf+(Dit4}
zRE$W)pmc*8ejHOF>MI}=m_&9TG`hj!AOaMx9^i2F@$~a{b@7A+#sQEl10p`5d<&>w
zK<0ojR272~14DxWh;x8}VFd#N!-@k83=T*NUL0g#=-^;rC^*Hy@PGl5UL`<f#c2kH
z*-Q)!5kD9h9%wKya5FG41c1z8U|=w2f?!aXS}`#&K&^(#qEa1S85lOB3W0gj3=9k)
zd)8}RKA+BQ6W6#gx#XAd-KQ!H3=9Gw6`WiQZ2VjdS};CH3|g*PK=p#6nlXium4Shg
zLBF)PNIxet350_yi%U{-^_=ulDswWEAR5)6<^-H(VCZ0AV9<o}Z=8n6kCLMyFd71*
zAut*OqaiRF0;3@?8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*OqaiRF0;3@?
z8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*O
zqaiRF0;3@?8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*Oqai>{2!wicJ`E1=
z==}ZSg%JaTM{~6TLkWjR=lvJQLEP?Y0gu-ICA=QToxz*uJ^u5T$a{2GOMpbxJdQhq
zw%Rjz{O2!~^yqfy_#a^KS^&y`sw%zZ(d`T}(4+YXM|AA}1)xI$()9S{TNprK0?2-!
zZg&aa)&nI<9-Y5Hx;?rJBs{wvI6S&NBs_X;ADb~S7#@gy`55dX)>+mJ3?AJU9=)b3
z%@`P7Xc~fL5A(}|Pig>}=h0mcb|72qOJ|6lG`OBpke;sw3=AH}5B&fC|9|XZgk$*S
z9T?E`+e7rL!S$Ph^lvqQ*$Gzf815Jr9N^jc&9n3Wi{*w43?7|_Jz5Wx@OgCJ0DIdp
z&M`jr@QWglwcVy0{TUcMTECST`*i+)Q2|okdZ5J9v-7w|=W(CTZ-$o)54^Ai33nd$
z{Lf#y%d^{^!>98<C~((#^xD2LWng%57c76wqmwn+pMe2nkUmJH^+4%%!`rXh5q<*s
z2j;$O9-W^(JO6uho4P@qz~8cx0Th^`YGD4QQdy5)+r=RBYeA-U9`fj9<pxQ0n~H#_
z&ijTZ4G+9n2J(kBtECkK1AogTkO{2^_*=Gs82>p+xjec(I6OL!{|_|qXturV$H2f)
zD(ul~8w4^}6>Ki+K0gKq&+d8-kIs89W*IOrv>qr;@Pxz~e+%e@43ExxKAoQsf%kGA
z$j)Bd-zE$UFCKu{ogmNG`GK9E4x(Bw@wZ0(|NsA`#sB~RJ-TgYSu%iACOare{SPqk
z>HLpjDuV$7!;9N`poDL$334faYbwYHU$7CZ`XD2~NzD#q?>;>Sh8>_pgK*bjl<@WJ
z{O-~D+oQW4oNKszI=?yozutPFR1}&SN)SR!h6j*LL5(kPeuSAqQQm`E5_>oemf=9|
zMam!6`k(-0b+Z6bpga|7%)s#CsxC_UMpFMA6jt4=VsQ1UAoWX8)kEBW-1P$}hkNw0
zzA|B8@aW}z=*__3)9d=d0~AWvJUR~<9(ZA@3yS;_VUO<851>-VRtpptppxP+BBYV?
zD9HW60fyiHPd8wMXEezd|8+ndX90MTblh11Y&$du#^NX+&g+6a&gy0kqQL$)0{LGD
z#s6Ucyf^_?N|ge!S_4#mf?`$!>_}*-Y5)=im8#%_RdAK6AXTNeK!IfjauK4$76ox>
zRbnHjA5eMX80r`j9N^JhFTudyBFV_W;Mw`rv-6KfXXpiw?s|<ES|G=C9`@;c|3V(b
z?6r}2!KDjQcFeQ8Mn&TVCx`=Ld9)s=voyR7Ex$n}!ZlE2{%7z3rIULeorhjr2MZjA
zXuO~UDm!XaG(0R1*IamU5F`h&Xd8&xdZ5n2)$l*ey*{1qeLDZWXa-6BXYlGZu?8D^
z2rT0H|2W8jbs!ThkJns&u|o^w!lRzuB`OA>BAOQ@y;X~Wq4~%E5<idb(hG3W`A|^{
zkgFtoI=}gJzIxH14N8WGJ^%BS*n9r>WhhmD?e5Xd`W%!2z~#s{eFlaXWm*gjuem{r
z%s`4t91L&6Yl;}hSV(Q*+3m*R)A{{>fB{HN={?VGR}P=f_n=zf5}MorB)Kh!qJ&@G
zg#o30x`wNK;)j(_Y>*<$qZ^V!5ycg#1Op{JoFy16ydn7$sT2gIathiG5Ql&aAv>$U
z3qe@@j!3_rov=Ew^ZN^5P}u{o$vW@9uz?EnZ39(po%apDy|9KP5RcB|FB&vJMHhd|
z8i?cz3oQnQeV`MNJUZ`#60I0icgsQsP@+2G)A`@0^WBSRRR)I6!>tEO?L7apsCahQ
zgUbRQP;Q6SRKA_*8osUHN|-(Wvw3#cgToz7LAinFe;$=mKhJI#6%L=y=b&Wa=+Vtu
z3U04}3bJN>28P!v9^I@-aG_}+AwiFBR)4tA8c=$Crp~~y8&t1B3rk0MB1{9f&oIhc
z%@_UZpoEU4yglv=iKpYvkU|V<Bue=UDNidjKt|ku!Nm-rGeCSqY32^%(yBB=ZGR%C
zhwGl5-$1Fi+1ApHfq|h!$fLX3!o%9u#gKuaRP!b1s4I_N+nIU{3@;)<F6?9#as$;J
zU`6tVpcWCRW&0vY4I<ilpv2MD@U7uD!vileK!FBIU>@M|@(3suZBYl6W;`k$|4({$
z@~E^PD6#NpJy|M-9I>1)`qV*btwa};Pr*gaFG%A77Ev$R!A74f<wnxy+xovm%+>IT
zN4K+vhc#=H0Rsbn>kI}4hSw4=&Z~mcB&g8|kxemRU?{Eg=&p9~=r*-?Wnl2I{9eMp
z3v?9L3mJ7#0mxVy53=5)*K~<41H+4pAP=@4C~@`ZE|&1<t~LNQdOHvKbbj>h{NW02
zCswF|EII1i`lLh<oLrGz=kcGr)ESgjx}7;*o4*KC1&5ab#I#@yWOEGgnd8&>0yQj9
z8uZjE|NOu{Z2gZ^{#k=l28xgdIQ&qAKn14<s9r~urMQ}Y@bVg*Kd93FgB9cym7g$w
zf(!v2kp^m2gL{DvAb)|{E8w#LK_wWrULZ&ns25lb4h9jA<IbS-=Rk&(!U{Zi2@NT4
z!1*yG*s~Ln8MiorGGm2DH|u66Q19sge@i#0-0k-E@UZ4xt_#X{^B@8m9@f0mKmx6x
zf)^Ykmd8uPJUWj-QnQ~5I5qRPHi6ZfW`Pa-#^1sZYEX7pd%XCd46&kBm63trr9Ts>
z^=kW1i-F-qnhL0_VpRue>1NdcQLP8~TT4ORg<jickP>Hz5+0Bo#C4#S4u5MBNDZjx
z+->?nhk@b6dS!5y=J)u|?9q86+A#)F?Sr#ew?BtRccTEvJ)q*EoDtN;`{vX6>;DB0
zkiZ^r`ElH%m-VY1D5pAel*o8=n@(^9x$gc86=hJqJL1uLpp@gqUquE6SHma1t=~%7
zeY#C+bQl<XTc3FJ+OD!;VDRL3{o~Q?ZQ#@GY~j=G?cme-&!_W~;ejlM;Frfi!P9Fy
zTb+U7MII<bz&$P-kR6~vI;{i=q!xX!V~&7(eMpXe$^dfob4Y3R|AJ@daf~9sxAi}N
z3#gOe(OvD~(aWj<_KZ76iKIuj>0t*3h8IyvNIo$}^~ok}m`@&9<MYY?VviT^6v5pJ
z0gukZ*h?eN|NQ)|CI9~aM|8Gdrv3x<BUyz&ZMtp?kM2MTk6u$DO$G+T+lB{TtX5!P
zc<BUD@*Y&x!<D?(fGX)wU|;}wn!i;FY%;v6HoX0k<KO@P80`^{<~J6eo!7zL^nV`5
zT|u=CgU4~#4d7nFan~JSx|?-7$i7b310Kg+FMx$QT~By4+b*|<^|gP3B5fzg)19o7
z>_OeG&=Wqru@`)LQxEv`=I-$56`cX9y?S|L>_D=vD|~u=H+Xd3_v!oxw!|^WG1xK0
zG4#b0d2q-nv>qt2^y&7!;M;n#l-sA9*9K&2r|$)i?$Q+=-Ju&ix_x)}bh{q#Xnq53
zww9iF(I^MjD)5>W)|ImWC39%|7TVs2v=`C(bB-_cLDqH>(Y`qD3@MV1J3|T~Sm^?9
zci?CbgZnQY&2J<;JFkGdF8@8c>p>NLv?i!5X=7kucri~7R5#aKv>qsN@$G!(YWM_P
zjV}bb4U`Q_K_x@yVNf;x79`fq?eYJlM<;jd0pHFqCFY=dT+{<H`UPsAalSY%56<&C
zprH<*&UZeYk35=>NFV~`r31*A){~$y6!4(g{{RDzZqpmKpse<-gxj;b-oeB2d#U1U
zJ&#^n5j6&e7gs<!z&Uf1E!edn1+a=cSsq-}l^XeWmuvV~o7QVEFq8y(cDqY}D(n9Y
zh6j9GpZIj<UhwF2J;C2{kO5RwynT5D)L;dT#`K!*RApdz@eC9WoyWjM4-+Fe*YURq
zGJ;b1OHpvy^!|kv$Q__SaR&>eUT`&h;?w!w@Y@Rxn2Z~^pg88)T@D)6^EL)q_5?Hx
z02)*B0VNiYeg#me2kFm&>;<=|eLCNRTGU4%YJ9p=FYvc;flUK7F5n($wg3PB|BEj&
zpfcmAXXg=c+Zi&D1WLJ`4<OBwhq53AM?AV~Pk<^g)RuMY+Y$kAhl>l*sq*;G&EE<-
z+!Gp^uM=M^mtkP=={D5^wch@-`L;gcZ+-vo|Nj@|GN2;(Fxc{P36KMPIzRbzzVwAz
zo&vSJ^>ztAs0qXcH51&=<11A~bnRYCdUUfc1+}ukJx3N$OxVbP1I?%N0cyfI3^M8&
z!l>7`ko#lM`qZQOje=(<s9d?h04m#$yPg2iKHawIYM`>Z_JT*R>jjTa78Q?P);*wV
zte3ae3S2zg@agqEfjn+gD-BM`)*j8aj@F>68N{*&v-~`oZEeA<M33&;8y?-J2S85W
zF9X(*<k2lFV9mhrx&&M(yaWepuWgbF1H%grSy14yezXFW8@3>dp>&f+uWcYm@-;YQ
zS?@q3K@>yje2-pRK4k`m7v~_7$E_F`UUxxlZ35Zq39;44qg!?nNN0EH4UcZ$6F%Lp
z7koNxRD3$W`*i;D?7aEniX_O=BMP9*2p+8ko6>8#Ly3Xm#Zzfe20Tz=<Joz{qubN~
z)K=|0{^AWd5IlNW{Xs_e@>+l>ewPy--IAbUzjUG}QtzzgMX?m9i7TsQ#lY~g_&=!l
zhxO0Hf<1a|jVu`$_I+h$V0e)MQV7Zmf#5WL%%fM96*PX)dH=;tu$BBB=HOf+`wcW4
z*K7M8G{Vv;`vW{KDtpox)L>yf1nT5<vK}@C2kIGf28I{2p}K#9qQBSn1gP`Z4iy8{
z*B-sLJ)rTgVyM_fh`I*Q;AcEkY%fHt1~d-t4HW~ABKL|WfNIMBtdSt9*L0d3C{cZX
zF$EN|y`tVAH~wdJ0a3lCtx(Bwu%u}hC};PIn(ITfB!WdnCCor0xU7O8s@HTiRErBl
z@-b*6`9JF&5Y=m%0+m#UNS*@)!GG4{Agb3i1}e!V$-wZM+12oyPv?hd$2d@*6w*Hk
z3x@XkdQInm+Hk^B;Izu=(H$+}(R#Z?+oRVM)VTBM{Qly*Bt*`)yG#PqN#_UGn%_X?
zy*wxZiec6tpr#Qx)3VDkFuZ7&0F5Ss6dZ=OZ;|ZN0ok`0Y9_LM!dj59Xot#Sv+pOw
zz7Dv3(`6YLUWntfZ;vJe!wX}mnaK7n)`Zw64wb`ZUp&M<3AlagAp4e!<ME#l$iBl8
zkT65G&lqaoa;O|O`*<PttpGJ-z~Q-FhJoRQIX?UDXfQCm2!@)8Y~NuGi2ux?a#-yX
z2gjcU+&*`ZeaFSn{Rf@zfsLp2nx=tly(^CDOB0V?Q(v&H|6d%0%K3KJOMv?265vz=
zYHUL~ub?95r3A$KoeCgVfW~l6NHZ|J@BmwnP=?vRMH-(!EC$lcS_U_w6=Vb}*a&!f
zM^Zl@qTUUzJ`|+>swl|E*wo(?1)DDhSFZ|EzZ9qX^APi&f!YAz82l>5!0@6}6grU-
z3!Xnga?fjs`P<;?kAl>@qMGlBlz-lU8bQfos38x^ub07f+V>akP&sVz-3hVH18!S5
z$hPYuAm8K2uQee1K8s@7mkhP<I#dpuea;a3Zh%TVaA-W2WMFuaj?X?GkbTWiGm+Et
zXH`ggPKU~2v(H=v99tQ1`#{C*i|@jC{5J<wYYIcnM7FOPYTtJeh~Kf<R|>K32dEwg
z`;Q&uzjl1~>45B83^fzkK4GYR?NB*v_T3f+`>z9T-*gF3`G;;FQu>+*(GdpMkqXlB
zR0tFz*wQ;AM7<hZy(vijR#f$lVZq>ukP<I=H4>@<iJg<e;Lu_LxgOND_v!rhVy6(K
zHDTo0c?={58aQExPf`|3c>HI6E#cA4x)Ic<0SDs&aR!DL4j@ZG69wRuiPFBqwEwCy
z#C}gK_U996zaiXy2ax>-1VIgc@K^=7>I4mQLgyDS?GJ_8e^Ln3{}%;`^gpQ81@XTa
z$p56+e^m+Ue=PPV6KTI8+<pg;{Raea`X5{$K$1X!XD6as@CUo%|BJ(duu!N64V6kj
zhFM-*5g;xgR)E@r;DFdJ3abBL4neLH2&9LjijeTIguA2+>XL6Stci4nBHS4UAZKjg
z#~B`&>A@9h|4so|NFeOL$xmD`ECjXfz`?LVgn{9O0V(z$Re<^*i~agU+OG(=-vDI)
z20l{!54C?MKc@d*^AYKPaBB$EF4!Op@;@o|AC-stAB+9&MB1+ix8DF{{{~(>{)ZQY
zKJp9<FAnp8eT~wdHio)rIaCf*LZY{)J9)w7`wDQo2;}kYLZI{yviJbF6vS*#-vPC_
zgP~?3*C&Ueod9#F95(ybL+rDF+vg6l?>G;rc0jcclGzdEeJa%So4jD3qLlYnd5DYO
zgW$Fj$eR}g85mv!fUG69yniYSiQh;p_O}vgza!lK0FeC`xN*fVQhjO)GP|3V4X#xb
zq;)Dt>j7-_=}U<EYv6_u$g58U7#Lopaud5AYb!+mG`RkyApNS`xZ0D35cO$r^`#*7
zU%5aris3$PkJd{i%CRr)Aes!}njAoy4sgLOiG8`An}Gp7UxO4Lb3s1oX8i_k2!Z^^
z3ewt&Y9DAkCO815%LZCw^qRVW)K-E_XgyG34H}{YEeHd35?-itgT}0mfrfchJrF~R
zpdq3zxLq^(85mwja)E}3KwVtq>H$9C2^w$3W&b&l{-0cs)QW2VaxR?qbHVMG1nHm2
zNzi_eZq}pVb`>aqz?P<PLLvl`pF!hZaQRChLtW72TjBDzK=LZ+@~Lq7T_AZD6nSua
z?Tvs(=Lg7e*b4^mn1<;NNR84P`og30xZ!~plQ_VcS>Lm}4!oX_&!fBa#fwXvpaGS`
z;3gU$+!|Rv28I`NIiOj_qxpycxWE9<pTlCBE%s$3NOL#qHBb`_T$MfL1*LzGKIHsT
zE&*C+BIwa=y9Lzhy98SPs0GnF8?JQ~NUIiHD^hz5sXdYd@?x(osE&ET!U2iedPvfG
z!3N^@n%)4F|DDHQY-NW?2ZCmW|1*Hn8uI95cbSAw=YR0(#Q7kNkkyH%o`}_nnjYP(
ze?W~Zu+O+b&g*7pV0eu_(22C%1=R07j2NPa#XrLTJ)l+}#Qz{myIE($Em+0F!0<wg
z9g=d8+dm$l5$4u!CHx+}wv3=!@<8m%O%UC&aNRi|-EY_+x)Ck_<queV@p|-{9_3|V
zh<*746g1te@^IaHAl>UQbi>>aS$`_v(fP@v*R%<2#AApNcR-CTaQM9DW?*=c%?2-2
z(lE*sko|sO%_$Jg^WmD;fi&w8XFn(+VqYdhjEI37kqt89H7f!8Z}Wit&jzlD<>8w3
zK$_RFqK6*D|DgN@UIq!yHF}`7K(FZ}9#9SS>BVg}(4547@LC1|k6zO%h&U*>ymVy+
zTLa36kQKhixfmE;xWn8CQiGIOu!ffz*u5qY_vV9IcHqFS0~z#>g#`D$<%YOd8_m5}
zA>tsnyqwAcb}t9q8gY;{(^;Tl3Q|MJy?xxE@NI#3d_O2Bg57(LlY!xd9}f2-r_Ttm
zeiw-Tdbs{RkbXV_`l0Dl6l{bu#0VR>5#AspPBTOO0$xH29j^w}UGd<7QC3i2Rm`KC
zl@+c{1f*>Wcufyv@iX%3O+@-={Z^t0o#*_|2J;ZgRE%e5oP;5yndi~XItSb|163Gh
zpfEOuxhonpj*G~?;PMtyzkwFJ{bvIef1s(X*8e4}9{>4EGJHGhBs{uJeL!QFt=~%W
zJ(_LZl^8&CexNHYK`W_0f*cPTexDDj7CH}mcGn4bbel?nbaWniF$*;72b=0^{_&r`
zWgkdg>&en`4@AwJ4eiiFt-A{vQf&QJI?<!owgVJGpnDxX!1H#8KvK=NAc}#%wHmZc
zpw~7Zq~-_6A)Tx%K?=G}XMwv=|6c@wL_w<#5<#<Fa$wh(wn8OMA(ADo9&mSOfwXrX
zdhrM}Wq{=F5-wN6Zyv2DOBG*pJMIH5{{nGDko>^$;s7JKGI#0xfVDBfFAp9+_vm)l
z04+9P_vmJo0kxg}Gl1GwNbB+44Pf$rK+PLWc?+2Q9qjTBF!?>$<vn2XbFj#R(gS$D
zH^8^^jBn>r&(3q;S@z#A&i(!W9~6|JRZ*R;S3Li-!&c0oBt@UjY7I|NGOfm3i|5l>
zY~cCdl>xR&22EqO1!$R{Pj{^VcqK~f{}MC>$qo=(!K-7?<f=WoU9Uhw9=6i!@Pz-W
z7q}Q0{;OUBlUKmxH86PtOx^;McfjO5F!=yXJ_3_Zz~nP9`2tM70+VmR<U26=0Ze`Z
zlV8B(H!%4FO#T9sf57BFFbTTE@xLk)H^@~iV3G|?a)3!LFv$ZZ`M{(Am=pq&B4APs
zOiF-BDKIGmCgs4S0+>_+lPX|R4NPi)Ni8s`113S$(tlM0Fxv=Bnt(|&FlhlMt-z!W
zn6v|v4q(y=OuB$cH!$e|CcVI<518}=lL25d2uy~6$uKYpDvSTCMuFKeU@{I&CV<H#
zFqr}-)4*f~n9Kr`IbbpmOcsF2A~0FP&A^bsz`(%p(u|pbAp^AE=A{mZ1)5=fsRUwy
zu19?-0b&(^)bW8>pgj~XSwO5=AhBOe3=A3DL97oT)>#ni8Hn`|#JU4weFL#BfmnQ?
zRZk~CEFBPQABY8-)qlAK#7Y2(tpc$sL97KJ)?^TC8i=(5#OeXD_JUYVAl6k7s{+J&
z17hWYSe#4@3@=kaEL9LI3dC{*u>wG>R1nJz#A*StY(T7~AeIq`bpXWD0I}|YSfCOx
zgMpcW;iU*jOdiDI0<j!GEYL-{8Ob2lH%11AjCv3Y<cy4IAQq@h%vc9v-2llR0kO`3
zSa(3IBOul{5Nj8RCBVYK@Nxr)r2=9t1F`HttT`Z7D2O!)#3}%>IzX%r5UUQvS_ER1
zfLQxLtSk`gGKd8V&Wu+eRv1VObSLsl9}r8G6|}wx#PR^KEI_Pu5K9llY5=iRK&+`C
zmK2D!5yS#58q2r{VzGh5K7v?(7#J8bxY!sNUVZ|xG(jv-pC`i!#JUF(iw3c-fLJ9U
z)+rFH3&a9l1Dml7!~!jX&e#WHfmS?cTn4cgfz*8fu|Oqgh9Elw!^=L97^qqP64WEf
z@C1p0E;G(Z0<l0zG@}B<N&~6u1+ij4td$^E5QudU!~(4m%(w+&*@48~fmkLW7O2zm
z5_EfOhBOBQ!%NUEj|>A4OAI9I3u1xpsLm(=u|UgLK?@jUO<5Tj9E1O>_OdcCOz=<2
zN=+^aD9W#>WN=Q*$w^GgNktY5NGvMJOw7S174KG>mt2yWpBG+~SWu8!<dj&P%HWh=
znwNqm!GO*$hiFj9NGw*!%P&z#N=?mE$jr+G*_2t8n!;eCTC9**qM%x!kd|Mh0OQy(
z#K)(nmL!&x6vf9g6qh6xl@ym}mSix*7pLZ=#WTdmLkv!d2eDH!L8{{8Q}aqvp{$C`
z60lTqenBOek&>EP0Ou5?rj#b9g2kcC)Cw>!sj?)s7|be4ElVvbPK8)d1oA3aI5|JJ
zAP1^Aw<sTEWl<(rOFlSw6$(J%tB_k-T%wSYnwFWDst_L!u~$Qr0goDl`|ztna<7Ia
zL#P#UI3<;4=9FaS6*GX{%b?+}$q*l(mzWFjX<=z%4xE#lpHi9w5l_iahIlO}u^8g_
z#Js%xl0;B^L-_EphWINfRUt7?0pb@1s2qd86>?gNj|atZe7r(_nnD32T`A<Hrlu5w
zbS4(1m*%GCl`z0$N-7Id6-pAbQ^83gCpEFSL?J&fRRONBBqLQJEwiY&1TF-3NisAg
zGo(S2C__9Xjl@IKN@iYhNn&1dDwGRxZxNK2ng_}SDNtTgW?l;DZgPkL1&Ku<N5;n|
zfny(LJ~ZWl(wVwyu{tyypb1<7lwvcJN=s4|)WI<mAFs{;cLB_9n5{7Tpf>sY``7sU
z`&*&qA#i5P%`7g?%u81=R47U<EX^!RO;ISwFU|zTQDP1}R2V=epzCKy21RUcYDq?Z
z3Mea;WF|v7Fs~FdfHrW*K<^y~l{cWO7eqr!5(Y*_21Z6j1}4TtMrOu~5v+`Y+$@Zs
zyXbvD%Uu~57(n;PgY<y-pnJG^7<3sxWhes!10$$G?7+ytJb{^k@dGmhLjVf{;|CT7
z1_f3IMgukmmIv$%%mEw>j2}1{SQNMz7#X-3SRU{&Fi+rRV4T3mz}g_lz|tVZ!1zIk
zf#rcH18ajg1CxO?1M>lC28InX3``H?7#JMn8JHXt8JI69GBAPG%@?RMFc)YrFnrKp
zU`o(rV7Q>kz&t^VfgwPLf$4w_0|SFD15<%61A~Dc17m?61JeaP24)8X28IU)c&{Y~
zd6BBPr5s{hAC#T}r58czHBfpRls*8ZPeJJ`Q2HK}egUOFLFqqGnhknUx&V}xg3>Bb
zS`SKFKxro^?E|I5pmYM1&Vte<P`VCEcR=Y$P<jrOUIwK%K<Qmj`UsRh2c>U7=|@od
z4V3-{r5P$A;l%}|MWD1Cl-7XKMo`)YO1nYn04N;=rBk4E9+a+t(oImh2TD(a(hH#U
zDk!}LO7DZxC!q8tD18S?KZDXAp!6>&%>rs7FfcIiL1_sntpufYptKp3c7W1eP&x!k
z$3f`~C|v}lYoK%+l%4>kXF=&DP<kDd-T|c_fEp(Z3=9FWknlbLr48aB`~^^2ARfwx
z(ifmKLjpwH07@r7=?PGp0eZ=O0hE3Kr30Y%%O8N!5K|Z!K0x^j$q<nRP+B1c%7@Yl
zsSy4KC~W|0ZZR-0Y=F`R=}>tnZ2<0MFfc5D(gK+f`3X>3APd4rCmnEzqpL&bH{j3*
zGj9V_{sEMR>0eU~VJT!oXch<sC$~W51EBJtG8it&z`(E%Dt`fo{Bfv!Kn_F~y7?EN
z@&!<Nbo1{*<tISp(e=N8%5Q+mqw8;i%3pxWgX$xQOCaPosJufigpY1MLk&c{0Eav;
zRDJ^vd2y(GK_0{$bn}&=@*AM?=<bK;!cCrp>i+=MZ;+2&?IozX0;v23D2=ZAK2+TY
zsJsEFgMqGyfq~%_RNVro`~xVBuJjXBT|gna3@ZN*RNVxqx(iSmRTTrn0%&{*6hUMH
zpfrR>I*FbhU?yCFmRI22moPyF1_n^u1c{52ncxDI0*xFnFfb^9I&TaN3<Xen156&O
z>;sgq;0h5q0Hq;31_lNf)Rd#(4v{H<(ifmKsQ(6*0d@Gm3<xm+YA=Mxzz_i1kpbi2
zp}|{?85kH0(9J{R<1vCHDQc%bVlBs&4j$l02gI67thx)th9?VZI>2Q<JRRUt02Np0
zgffVv(9;2t+Q?Qm*wVunXnFwel0x>CCo+d}wtyFrW}uY|2fQKr3_u+w2op>an;r~6
z+d#oOh$Akb*~0)j^@TXCWT~QddN^7OafU((gvLc)fvR&LpzaY=T>=4hAE4?Q2&nr9
zRkwhEI$Z8X7spl3@k8s?05lB&RF2OF#F~emuAvSq2t;)tvFX~O7gZmOR{$C!g>jJR
zgis_70|P@s7%HDyJZh(BV%>!+9jM?)2gI67thx)th9{NlJAnzrx^Q6B4Yu@PNvHHc
zbZApKJt#~f+GeoA1Cd~gZiE)J{D)Zx<NMY@M8cqS9F$Ij(s@uCq6$I6^rP#8slZh(
zz~o`{Gc@x->m5KCCO=A#hQMeDjE2By2#kinXb6mkz-S1JhQMeDjE2By2#kinXb6mk
zz-S1JhQMeDjE2By2#kinXb6mkz-S1JhQMeDjE2By2#kinXb6mkz-S1JhQMeDjE2By
z2#kinXb6mkz-S1JhQM$L0YSx#gcQdF0jC0k0)b%1ASWEqE!YVHgPen%g1rg^3KT*e
zgPnsN3pffm3N#8h3If352?YfO8U-0(wm^YLfkBWPST+KrJIEOdy$fJ+0tEsndS@_#
zwccO^nNq+J<`nD(-eF<q5Fb$K3_9Pz7ks`$d`M+MDnno)#{{qh^so}JAXvn;B01i%
zq@)OZM1^ZnQGO8vT--0e#1nM53Fv5!)D$#HkP&D?VTn1VsYqgw^FQL<o#S0na~K+C
z2!c%xf}NuRGJ&Dt!8_3M<NyEvGlFG<LC1_FBfA|e3{o5Ko|>0hl$q?9lars!(C|Y5
zAp??TXlQtkWK?`WWxQX0UTSbjQE74s$WtJZkfKt!h@eACWll<baY<=XJm}z!)FOrn
z422os5Ogd`j}J&JDozb4Eyzg)i5FlIcg#!iPOU7@FG?u}DX9P}fgP6<@0^jEoDC9h
zU<6CJWG0uyyQh|TmZatemlT1{6`8<L*nuSJms$=|KLJT77@{5|JOfi0Vax&~$<Tt7
z#1gQhS3pEvi;Ci%b5aw*9^8NsbIUKvO)LSKy#pZ_oLYh~_5eiOGq)hWs3aaj`GQZ1
z0qHsck%b7vJEo*SVjLuO0Y%EWC^fMp)d(bU14Y6;wFDF_Ac+SM3BS_Zq|~B#$E4!?
zoKlD{UZBdPq=1xvKo!gb3;sY6bjd8s%?FD&Fo9zLYPMTWKE&4@=)(C$E}3PSDd4c1
zfFkLc7m$;foazX%d<IrIh#MAQlXJ_-M>cx}HZ?xQ8JTI|*w}zg+P5^PB(or=66E<E
z*cAAKqxS$dset@)Na{X;O*SYsH!(9W1)|~tRuyok-M}UtT$)r;lnBaY422J{DF9_V
zM999trX&JX@-P&BK$A;JtpIuW2byqMY7sc38kmtI)+awN9i+MgRTS>{2`J*o0XhRk
z#xFHJu_Ut$oD3GA$Uwq)1&UYz_}CqY>J4Zz&|u$zB8(E=2T-NpZask_jvT@lP-IYp
z_6CX^G)x~r1R<F--U)P~6)2Qmpa_GCEs*F33{lwWL;guwjz#I<lI{nFB1nmiq@;lb
z95YZ$!KF`lYGQU!Y8u#t4issZ)ErQ$2(o_yszfF@QO`gTba##q%?l|?gp{reP~{+X
zgnMcUs8$0RxB{DeFhYI<nmk<N4r~%gMI*X2%m*j1NPvBD0Y$<iu^8sW8|V@lAcsFd
z5%nzggq>S8fuZmPnj9z<fb@Jo5%<dnXObT%f<c+d8P55+pd+S0IvQA!;|WR9DL+34
zq@V*u0aCzCz$SqRpBd=Vu<%)cDv?<Q&Sxu7MN7aTwgDm-oLX3#nwOj!@0_1kk_bA_
zYyv~!4u~AMf`(PAAom_X6A#YGOa|L<0$BoD)m}grL#k_Upol}A_y8(cS&$kZl$w|V
zE{b131Vi&Olk-zj<K2q#a}m|*2UJN<&~bEmASH-k`T<cA?^K$ZlLBr*F@U!^GR8Bo
zGB5>Hdgg%+u7e&FR?MIfUX)l+!l1^&@L(Gw0|P50e}T{NgEaM)F@kb3=nyjusWpta
zPxOOm-oyyY>>z6jQi~XNFk(B^52E@CBMZZXekKMMh8dGV<cjG`3=B-5)5#$6=NKXM
zEk+iGhPfaW9ScF^jHOHr584?U7y}#(7BD_w3P><8NN_Niz_fw6U;*O>rho~I512PF
zHZXl)ZD1<+z`B6Bfw=$#CNLU+0Ek&&V9>w_ffJxKh*wYmA`Ki23>?7n3JMAe1qKcW
z7#$#>z@Y%dNcg}6l2Kq_5a3{703GxRJ0Apejy#ADI|sxODi1sNBLd2Yo%@jo<-^YZ
z=!EiN=YTAM@?qzG?1%DU=YL#>@?qzIe1-BEpyx{Pa6;S%JHJ60%AWw0w}kRx=TJmI
z`3%r=4H}_**trDDp?uhR0~euu*tr8gpnTZ*0CHRq`(ft;I79ic{pG1pK5Tz@3zUBV
zy6+owVFD;TVf(*9Cz*r!(EZ!5q3U7#wn3-lg5+WQwn1mxg7~m~$Dp%lL44SL&SY+g
z`(Wo&fv$}J$-~a6ngW$S06nh?bYTTZ9(HckX{bEx{HiBVz5w*xD$qI2AoUBNd~qI#
zeXw(_jG%nj`BuJAKJ1*U6eu5d-c>D>4?Fj28kElfJ@0Bgln*-x>nM~DI}ZzV4n4>{
zuye6KK;>cQW3lr>+y^@!OCHLHos(q?<-^X)N`UfV=VmoS`3s=uXDx^FVdrQahw@?P
zX}y5*VdrXb@<Hr}ov)=1<-^X|a)t6YK+oGsg7RVKZZ$&ru=BU(L;0|CxQ;>ju=BXy
zK>4t9xmfri_QTH4l7;eN=WJO(`LJ`ff}niZ`C2(pKI~krCMX|vj@B$FA9ilmMkpV4
zPSy!1A9gO*V<;bX4%QziA9n7QgaE|7uyd}ApnTZ5R$fp(>>R69C?9riRV|bcJEv+o
zln*<XYAcivJ7?+wln*;^>NS)PJ70=R5aJ%#Ia8WYKI~j6Hz*%=z7*&ZF;IGhohMZb
zm4}@xH66-_og=jw%7>j7brs5oog4KX%7>j3B`O4Q5A3`sV<>+E^!zA4C?9qXNEMV1
zJMUvDln*=C;{ucqJHO*Mln*<nLsb}J-vQ{k9UdS)Cj$cm^gO5(C?9q%R1=gBJ0EI3
zln*<%X(yBqJGbc)ln*<<=QETKJHJI*gn@yNlYs$t?u8AM4?F)L4$6n^f3Jq}4W>Xw
z*=9od4p9DnD8B*9e+cC-fb!pf_*@JO98)3sL02B~aWOCmK>6mP3=AN?1e6~K<tsq>
z9Z<dol)nzjH-PdlLirX@{$D8H0m?TJgP89D<tIY<0Z{%#C_e(q-wWj@K>4qr{0t~x
zR2*Vn0hI3u<yS!Ync@r#(4r0O;g@Lqui^|0Y0QiaeS9myCliA9)q_r&hP4kCNiZ;g
z&YOnu_ed}>ARo~MQm?|mzyLbq6l4L2@5hMB2bl+JUtEHk2jhd(gY-kIPw+XcApOXE
zkbYQu<Rw%;j1SWfz6Bn1*eU}914utIAEY1FeqoS=_y@*^={Ep59!WnkAEY1F-VuZ9
zhw)+h7oh1!=7aRZ+DC>^{V+aEKj=JgkP#q0$o<HCkbYQu$_J_+#s{e<Cj4OSuXLz>
z7$2q|9QjE8N9Kd<hqc%0q55Hbn11B&0=XZV57G~7-_3>Uhw+Kk4{Hx@f$E3xVfvBX
z3$h=X53(QDemo7;597o1Bc~UTeq=sKKdim^5UL->hv`T5FGxQ!AEY1FK4p-Cq(2xR
zrXO^EJ97LZ^FjJy?O9Ezei$F7A9OxFvVLSfNI$Io>krir<HPjBjzt9VLGDN9gY?7N
z%au_5Fg{2<DE%R)caVN$K1e^TeZ2sxAI692Hvln^(jPJ(q#xEEKL*tg<Abz;>_;vy
zK=vc^LHc3s_t#MUFg{E_xDSeCKQbSrAJ*RIm4?J0j1SZQ0K`DjkIV<@hxHG1p!#8a
zkT#I}H-I7!Nk1|lq#xFw@Q3P$@nQPG1t*ezWIk9wwEvM0(+}mt^nZXIbPX*(k@+C~
zu>Q(qsD2n9rXSfpQ1~PB!TO=+C~bo2hw@?ik<%MUKQbSrAJ!kb0M!rU!}KG&7o;DV
z57H0oKYf7ehw)+h7l0T@;fKrz=}(A;L=d+OB>rK1kTy{KHGmjM`jPn{{jmO;EL1;?
z57GwGkL+KN`;qw|{jmO>F;qW{Ppp1ef6x!AAI2wEKdk?l3Dpnd6RRKA-)x2Ihw+Kk
z59^=KgX)LzVfvBtD=7Ss`JnKF^=G$1^~3lu{mA7tNIx<kq#xG*y$ID0;}feN)?a=D
z)eqwns~^_CW|xJeKNz1_{jmPH98^DyPpp1e|J@v_AI692M^3Mx@I&T<!VlKp4}j{2
z@rl(B8z0Do>WA@()ejp_sE6u@@rl(B8-JJz)eqwns~<LAu?ngm#)s)gPH&*_L*|3R
z4>rDW5UL->CssdfJme-+Ka3C4PpJJ48$bCD)eqwns~<MrA|(e&zc4<r`eEZUwov^r
zKC${?<2i{?{V+aEKXQ2i3V&oiDEwjLKaEiRFg{E_q40x^7cGM7hw+Kk4;x<sU1tpH
zkHPrF>W7U-J%rg09giYbKWzN!KU6=APpp2}c$bVkB>rK1n11B)0u=tpd{Fqq#>cFo
z`eA%x^~1)~BBA<We3*Vh@dq1!Yk=y9@rl(B8?Rdi)eqyt^dskYkpGeSApgV0_x3^c
z!}u`$$o&<Neq=sKKWsekI#fT557UoqA4oqkAEX~Pe)tKhAI2wEKWw~_M*$N5Fg{E_
zvU@@HBlAJ_!^S7op!#8an0{pUg7hQvLHc3inKn@UFg{E_q4<Z5e}+Ky!}!GNhmDtJ
zL-oV>F#WLeVL^P5`;qw|_ru0lo1pq(e2{uj`w>|^HvO>i*tt;sFg~&RVdJ;kp!#8a
zV)etudoMxt!}u`$$oU=Qeq=t#|FH4lH&FdBK1@Gye1r5O^FjJy<H;<Fkn{)R!}KGk
zH;{g0K1e@o{8=8VAI692hu-xA?)HH6BlAJ}VdK^2Q2j7IOh2-HApOXEkbc<sb|6$g
zj8Ck7*m!t8R6mSQtbW+|c^6baj8Ck7*m(O=sD2oqSpBf^`6E#MFg~&RVdMFaq55Hb
zV)etu{~44Z=?BJ#=|@hlp!h@PgW?Z1Umy$B597o1BgZ#LKQbSrA2z>W4%H9i!}KHD
z2hxws2kD2+M+8Fk!}u`$$o7HsBlAJ}Ve=QcQ2j7IvHD^29sN-KFg~&RVe=y!q55Hb
zV)eu3Q_e&6!}u`$$m4|||0DB3{)f%KyoKtA@nQN4Kn$e*KQbSrA2wgZp$thsFg{2d
zG37UGe#Zo=AI2wEKWsiI9;zS4Cssdf{-_J8AI2wEKWx5fD^x#>Ppp2}{M3D@ei$F7
z{~}~~244Qc_)IDc3~EU8S#nT5Y(C2d%7@KoMMC+o`K)3nA2y%Wqr$)dt7}14E{4j(
z=C_WZ$v=k5!{)oVRZ;aDszTfcoBs+zlP`qI!{);#qsecE%ERWzE}+T3hRVa{%Q)4b
z_Ce>v^r3v%d|4or51TKG0r8RM%bKBl*nHV?C?7Uob`Hvi&6oXv@?rC3a_S5WYFrEq
zu=z4aC?7UomJ8*>=F1j9`LOx2i%>pnzKl@=VjgV1Oc%<B&6h<%`LOx2Rwy4fU$zm-
zhs~FP(o+hE07WhX1LzuA27X2c2H1F?2<XaiP6mb!Xa^B=M<72Z1H%mH{Uo4Ec|rUJ
z&~>N`3=E)YTM&N+^!#qn@%|wGgm#DpOQ7bhfZk^Tx`CCEf#DxSmVx08q)dVHf1>fZ
z85uxn9xgAB#@9#VyQ1*}(fEmI{BksY8yX*U1u(LE7oy2;LgSx6<3C2@e?jB3F`@cT
z7L9L+#&<^JhobQ_(D-F&e9*<i$o`p%CJ(xf7+HQdn*0Sc{!=tQ6EmuRq|o>#XnYSe
zemok#3XMMrjlTwse*%qv7mfcGjsFjg&&z`9emOM0AsQcaJqB`k1)|BPpz%x4_@Jwl
zk@e3+lV5|zKZwRZkH)``#(#&#=V3+lp9UJ=0gWGo#!o=w7ozdo(D?Jw_&d<}r_lJ1
z(D+}__-t&b?w3I0tE2I4(D=S+e9#5a$l+gzCf|+5pNq!dipD>V#=nNfe}cyUipJ++
zNA;gP8s7ko?}WzpN8=};@$=C5&1n3IX#9C-{4Hqw3uyfNX#DqR{QnR>yauvjVqmal
zVqgH>g>KKpz~I2dzyP{1-HC~T0aVtxFflN=GBGfKZcX=QVqoxLVqoxPVqoxNVqoxR
zVqgH>s~*V2z!1d5z!1#Dz!1X3zyK-}!<ZNt!kHKtBA6H$BAFN%KsT{RGchp4FflO1
zGBGg3F)=X2GchnEFflMBGBGeDF)=VCGchoL?rsNN)1Jn}z>p5QZJmjMA(M%LA&ZHD
zA)ASTA%}^9A(x4P0d(7YJ`)2&0TTm5Ark{b5fcMLF%ttr2@?ZDDH8)jITHf|=x+E*
zCI*HoCI*ITCI*HYCI*IDCI*H&CI*IjCI*HECI*H^CI$x3ZSu`b3=E*Mxs{25p^b@w
zp`D3=p@WHmp_7S$p__?;p_hq)p^u4yp`VF?VFD8a!$c+qhRI9}450EGbVEI8sV)Np
zgM%#-1A`qC1A{vg1A_+>1A`|M1A`Y6149=R0|V&(`YB8d3^SM*7-ljtFwA0NU>FU5
zs)j#<0z*8;8t?dU&>C;t>#ia4IG1@tq|p{~BUWW&m4+<ThDc$o8;1xJT00&eP>Hr{
z4nqWazZ__rBJ`$i=uSD12>QlFkQnlw#rS|q(0(!4dVlCLdYC9^Q9q0gTg;EfN7Dh_
zDi<G6iP$3t<v>^QLwSf@aUcQAO_NYjY+ELwQlLGGkj;@$0q|mfxCmr#9qJal_<%}`
zy>$pN*rqy!2y8c9d_X01s~db99g<K=3MkFN6yzcEpgZOeiXmI&Q26kDYzR@L&2gB5
zC>B6fB5#Mo&;i{ChanE%uZ1DwUxef%s1=YcZx~7ud#^C1AbYPc#Nm6dFl0cxZd@Q6
z1~KG7d#{iMK^xhS`QROF2m$ccH6$*&Td;T@zDo_E4J9A~VEfV#^5}trEi{7At;XVi
ztR9E@7*c#dx0%HUR6_Zn;tCW?NL*ME1=}@-su*=g8Il}mBP)C>8A2SkhYU$16Pzxf
zHbS={BczeGks%49?jOUDfbD-mmVoRS!w^9fH82-I_wu5s^ejn5b|q|A7($IlVsQpi
zz#;7eL#XjA2E_=HD$vd^gf!BwE+jQbqTsz=2=$nrgo<Ez38|EYdNLTaSqjOh60q-3
zH+{zkRAO%F0tsSl{*Di*MBU(pECAWr1s8#q{t)#@4uc9n-32O!QFoGq3_xt@g7HA=
zK!Js2V^@4YB}^1!M;B<5Inq|Hcn}|H8#<T|iYw^0bc`M7kh&HUmXKXtDXEFz4eyvT
zMX3cv44A?h-~<D1Lm-<|42lU1(e%`k#FCPtd@SY^<CZEZN-Rq)Do(|sD;bnMv4|IC
zCTAo=w;f`-ITM@<FkF|Jmsx_vti056s3@pZLEr0^oS2gXmB!O%K@C<s3W+lgEky8{
zhZI0~lp%Q?k19k+zzS1P7#5@!f!lkie#E8%$#dAG(R_tX337a6lTS%4PA-a1Pld%J
zHZ@3o#U_pLB-S=DZa>5$8H}725<yKMP(YO+ix$Inv12CBT-e@wOsOLH=6OuH6vzPt
zm=Xox{rH&Td9aP=m{JAs+8o2-iLkR0Fm)w?Ht}K#XXZhg9~egEKr4R?i6UGQiJ<l!
zhVqJhY$X#`!5q-0eM~2&fe&fG6wk~n03E~-k39}yvUuVWrUXx%!j#~PTbLSLQ3;dB
z8KN)+I71eu0DB0-WU+@dOcra1r-2Tmh);nwTruLVq^L9%w=B$L0*1hp5i$(iEXHs`
zPHJ9yNd~kKN6w?kunpjt2_rqVB(o$n7mKE1oH8k(79xi2si1AzSj<VSz*-w6=ND9B
zQJs>ST7W~UC^e-t8C&HEk%&*Nz+xV>rp6+Z1U}ILGenA@C+%R-Qj}kcHQ$12=p3xp
z=N9G1r{<LuWu_KmF)ux}Bt9=O7n`4oaZ7=86_zIE;L(Xk8l*cHauyAy-$AzGlmzKa
z$xp`O50EzOG9W$4py3HDx<GP?d3pJu=4*Z)w(tR&i(duE49G736f8D?^y8K-N-RgN
z>af+JAmfWubJF57VW&ty!VIrkh>ft49Iw*UJWw+X=5)MDL1!(24nHEo0iZ)F^6<4B
z(0q<uR3sM1=Yh`>!V;W`#qq_^GbOOg<|Y;tAl!&-Y%cg%Eo^a|o?222Z{}cYn4&1a
z)2KsHha)HvHeqcO;WGl%7(-}B_8X|Vq?cWulUa-}%n7Ojr$x9qkX)FOnuaB$p=DaW
z9+VM}y^{j#iRgh-Fab4?L<>6C3zl$^y@)WXB(bQZxID8Y1Ba!sq@@S(03lT=`N>F1
zu*DjvovN2snwMOXnV&~MExgsNmjo)Q<4ZstRD{Q|8IRCHvK^Q~hRp(SykjvNYXT(F
zvyh|$E>|HL3wO9cloo@^*kt0=qPSJRpeVB}u_RT$EU`#GEk7qEwWwI%I9I<izOb^`
zxWdvj-qP5_#G>5P$fBY!FQ*)I&`5foeu#cSW`Qoa(9Frv&C5zIuFN;8(g%fDNqmxp
zfw_fwN?Nk1g_()Dg<+DpS&D(BMQTb?O0rR!Ws-hzQ8GlSK0`c6OF?Q;v0gI6fVd?I
zbij;$NosC^UT$J?esP7ap@E*IZiR(eyqSrvv7R}SYY~pcHZTC|Rw8_dP>-!r9UZ4A
z!B!oBvIw+H#8Vl-0vl3W;!#MPacDhyeCCagQ-BI;Q11XXo&qWXp)rQ7fTVPs0>?xk
za`lD1*NdmOi>H^1uXj5-a#4UXHiWewJvwp$KClu}G9jl@Sm%Fq<RU9IxdbwdKRR*&
zEjf`B%IL@iX66BP^&wpjECB}U#$z9!0BOT61JVO-VdF3l|F{ImT>L6P=0h5{u<k!{
z6od5RmW6e)piaY9tAdON_xr$&@I3gq8D6yz8(|$lyh=w$E^rSBKqf&!O(3i-Sd>u)
zY)xDw{diiMD8}GXhh!R#7)1<gU{ipkA8Vf##T{5xAen$>0csecOCsq*&KsciBz&9%
z9Li`K!LxV-RY7{juyGqm*)cla5)bMSjgGfK$D2T@t01)ql2pL80dZp}WRJH%H*5d^
D44=yN

literal 0
HcmV?d00001

diff --git a/venv/lib/python3.7/site-packages/wrapt/decorators.py b/venv/lib/python3.7/site-packages/wrapt/decorators.py
new file mode 100644
index 00000000..506303d7
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wrapt/decorators.py
@@ -0,0 +1,516 @@
+"""This module implements decorators for implementing other decorators
+as well as some commonly used decorators.
+
+"""
+
+import sys
+
+PY2 = sys.version_info[0] == 2
+
+if PY2:
+    string_types = basestring,
+
+    def exec_(_code_, _globs_=None, _locs_=None):
+        """Execute code in a namespace."""
+        if _globs_ is None:
+            frame = sys._getframe(1)
+            _globs_ = frame.f_globals
+            if _locs_ is None:
+                _locs_ = frame.f_locals
+            del frame
+        elif _locs_ is None:
+            _locs_ = _globs_
+        exec("""exec _code_ in _globs_, _locs_""")
+
+else:
+    string_types = str,
+
+    import builtins
+
+    exec_ = getattr(builtins, "exec")
+    del builtins
+
+from functools import partial
+from inspect import ismethod, isclass, formatargspec
+from collections import namedtuple
+from threading import Lock, RLock
+
+try:
+    from inspect import signature
+except ImportError:
+    pass
+
+from .wrappers import (FunctionWrapper, BoundFunctionWrapper, ObjectProxy,
+    CallableObjectProxy)
+
+# Adapter wrapper for the wrapped function which will overlay certain
+# properties from the adapter function onto the wrapped function so that
+# functions such as inspect.getargspec(), inspect.getfullargspec(),
+# inspect.signature() and inspect.getsource() return the correct results
+# one would expect.
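+#
+# As an illustrative sketch (the prototype string is invented for the
+# example), an adapter lets a decorator advertise a different signature
+# for the function it wraps:
+#
+#     @decorator(adapter='(arg, flag=False)')
+#     def adapted(wrapped, instance, args, kwargs):
+#         return wrapped(*args, **kwargs)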
+
+class _AdapterFunctionCode(CallableObjectProxy):
+
+    def __init__(self, wrapped_code, adapter_code):
+        super(_AdapterFunctionCode, self).__init__(wrapped_code)
+        self._self_adapter_code = adapter_code
+
+    @property
+    def co_argcount(self):
+        return self._self_adapter_code.co_argcount
+
+    @property
+    def co_code(self):
+        return self._self_adapter_code.co_code
+
+    @property
+    def co_flags(self):
+        return self._self_adapter_code.co_flags
+
+    @property
+    def co_kwonlyargcount(self):
+        return self._self_adapter_code.co_kwonlyargcount
+
+    @property
+    def co_varnames(self):
+        return self._self_adapter_code.co_varnames
+
+class _AdapterFunctionSurrogate(CallableObjectProxy):
+
+    def __init__(self, wrapped, adapter):
+        super(_AdapterFunctionSurrogate, self).__init__(wrapped)
+        self._self_adapter = adapter
+
+    @property
+    def __code__(self):
+        return _AdapterFunctionCode(self.__wrapped__.__code__,
+                self._self_adapter.__code__)
+
+    @property
+    def __defaults__(self):
+        return self._self_adapter.__defaults__
+
+    @property
+    def __kwdefaults__(self):
+        return self._self_adapter.__kwdefaults__
+
+    @property
+    def __signature__(self):
+        if 'signature' not in globals():
+            return self._self_adapter.__signature__
+        else:
+            return signature(self._self_adapter)
+
+    if PY2:
+        func_code = __code__
+        func_defaults = __defaults__
+
+class _BoundAdapterWrapper(BoundFunctionWrapper):
+
+    @property
+    def __func__(self):
+        return _AdapterFunctionSurrogate(self.__wrapped__.__func__,
+                self._self_parent._self_adapter)
+
+    @property
+    def __signature__(self):
+        if 'signature' not in globals():
+            return self.__wrapped__.__signature__
+        else:
+            return signature(self._self_parent._self_adapter)
+
+    if PY2:
+        im_func = __func__
+
+class AdapterWrapper(FunctionWrapper):
+
+    __bound_function_wrapper__ = _BoundAdapterWrapper
+
+    def __init__(self, *args, **kwargs):
+        adapter = kwargs.pop('adapter')
+        super(AdapterWrapper, self).__init__(*args, **kwargs)
+        self._self_surrogate = _AdapterFunctionSurrogate(
+                self.__wrapped__, adapter)
+        self._self_adapter = adapter
+
+    @property
+    def __code__(self):
+        return self._self_surrogate.__code__
+
+    @property
+    def __defaults__(self):
+        return self._self_surrogate.__defaults__
+
+    @property
+    def __kwdefaults__(self):
+        return self._self_surrogate.__kwdefaults__
+
+    if PY2:
+        func_code = __code__
+        func_defaults = __defaults__
+
+    @property
+    def __signature__(self):
+        return self._self_surrogate.__signature__
+
+class AdapterFactory(object):
+    def __call__(self, wrapped):
+        raise NotImplementedError()
+
+class DelegatedAdapterFactory(AdapterFactory):
+    def __init__(self, factory):
+        super(DelegatedAdapterFactory, self).__init__()
+        self.factory = factory
+    def __call__(self, wrapped):
+        return self.factory(wrapped)
+
+adapter_factory = DelegatedAdapterFactory
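+
+# As an illustrative sketch (names invented for the example), an adapter
+# factory can defer generating the adapter prototype until the wrapped
+# function is known:
+#
+#     def argspec_factory(wrapped):
+#         return '(a, b=None)'
+#
+#     @decorator(adapter=adapter_factory(argspec_factory))
+#     def pass_through(wrapped, instance, args, kwargs):
+#         return wrapped(*args, **kwargs)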
+
+# Decorator for creating other decorators. This decorator and the
+# wrappers which they use are designed to properly preserve any name
+# attributes, function signatures etc, in addition to the wrappers
+# themselves acting like a transparent proxy for the original wrapped
+# function so the wrapper is effectively indistinguishable from the
+# original wrapped function.
+
+def decorator(wrapper=None, enabled=None, adapter=None):
+    # The decorator should be supplied with a single positional argument
+    # which is the wrapper function to be used to implement the
+    # decorator. This may be preceded by a step whereby the keyword
+    # arguments are supplied to customise the behaviour of the
+    # decorator. The 'adapter' argument is used to optionally denote a
+    # separate function which is notionally used by an adapter
+    # decorator. In that case parts of the function '__code__' and
+    # '__defaults__' attributes are used from the adapter function
+    # rather than those of the wrapped function. This allows for the
+    # argument specification from inspect.getargspec() and similar
+    # functions to be overridden with a prototype for a different
+    # function than what was wrapped. The 'enabled' argument provides a
+    # way to enable/disable the use of the decorator. If the type of
+    # 'enabled' is a boolean, then it is evaluated immediately and the
+    # wrapper is not even applied if it is False. If not a boolean, it will
+    # be evaluated when the wrapper is called for an unbound wrapper,
+    # and when binding occurs for a bound wrapper. When being evaluated,
+    # if 'enabled' is callable it will be called to obtain the value to
+    # be checked. If False, the wrapper will not be called and the
+    # original wrapped function will be called directly instead.
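+    #
+    # As an illustrative sketch (the 'DEBUG' flag here is a hypothetical
+    # module level setting, not something defined by this module):
+    #
+    #     @decorator(enabled=lambda: DEBUG)
+    #     def trace(wrapped, instance, args, kwargs):
+    #         print(wrapped.__name__)
+    #         return wrapped(*args, **kwargs)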
+
+    if wrapper is not None:
+        # Helper function for creating a wrapper of the appropriate
+        # type when we need it down below.
+
+        def _build(wrapped, wrapper, enabled=None, adapter=None):
+            if adapter:
+                if isinstance(adapter, AdapterFactory):
+                    adapter = adapter(wrapped)
+
+                if not callable(adapter):
+                    ns = {}
+                    if not isinstance(adapter, string_types):
+                        adapter = formatargspec(*adapter)
+                    exec_('def adapter{}: pass'.format(adapter), ns, ns)
+                    adapter = ns['adapter']
+
+                return AdapterWrapper(wrapped=wrapped, wrapper=wrapper,
+                        enabled=enabled, adapter=adapter)
+
+            return FunctionWrapper(wrapped=wrapped, wrapper=wrapper,
+                    enabled=enabled)
+
+        # The wrapper has been provided so return the final decorator.
+        # The decorator is itself one of our function wrappers so we
+        # can determine when it is applied to functions, instance methods
+        # or class methods. This allows us to bind the instance or class
+        # method so the appropriate self or cls attribute is supplied
+        # when it is finally called.
+
+        def _wrapper(wrapped, instance, args, kwargs):
+            # We first check for the case where the decorator was applied
+            # to a class type.
+            #
+            #     @decorator
+            #     class mydecoratorclass(object):
+            #         def __init__(self, arg=None):
+            #             self.arg = arg
+            #         def __call__(self, wrapped, instance, args, kwargs):
+            #             return wrapped(*args, **kwargs)
+            #
+            #     @mydecoratorclass(arg=1)
+            #     def function():
+            #         pass
+            #
+            # In this case an instance of the class is to be used as the
+            # decorator wrapper function. If args is empty at this point,
+            # it means that only optional keyword arguments were supplied,
+            # to be used when creating the instance of the class which
+            # will act as the wrapper function.
+
+            if instance is None and isclass(wrapped) and not args:
+                # We have not yet been passed the target function to be
+                # wrapped, so we need to return a further function in
+                # order to capture it.
+
+                def _capture(target_wrapped):
+                    # Now have the target function to be wrapped and need
+                    # to create an instance of the class which is to act
+                    # as the decorator wrapper function. Before we do that,
+                    # we need to first check that use of the decorator
+                    # hadn't been disabled by a simple boolean. If it was,
+                    # the target function to be wrapped is returned instead.
+
+                    _enabled = enabled
+                    if type(_enabled) is bool:
+                        if not _enabled:
+                            return target_wrapped
+                        _enabled = None
+
+                    # Now create an instance of the class which is to act
+                    # as the decorator wrapper function. Any arguments had
+                    # to be supplied as keyword only arguments so that is
+                    # all we pass when creating it.
+
+                    target_wrapper = wrapped(**kwargs)
+
+                    # Finally build the wrapper itself and return it.
+
+                    return _build(target_wrapped, target_wrapper,
+                            _enabled, adapter)
+
+                return _capture
+
+            # We should always have the target function to be wrapped at
+            # this point as the first (and only) value in args.
+
+            target_wrapped = args[0]
+
+            # Need to now check that use of the decorator hadn't been
+            # disabled by a simple boolean. If it was, then the target
+            # function to be wrapped is returned instead.
+
+            _enabled = enabled
+            if type(_enabled) is bool:
+                if not _enabled:
+                    return target_wrapped
+                _enabled = None
+
+            # We now need to build the wrapper, but there are a couple of
+            # different cases we need to consider.
+
+            if instance is None:
+                if isclass(wrapped):
+                    # In this case the decorator was applied to a class
+                    # type but optional keyword arguments were not supplied
+                    # for initialising an instance of the class to be used
+                    # as the decorator wrapper function.
+                    #
+                    #     @decorator
+                    #     class mydecoratorclass(object):
+                    #         def __init__(self, arg=None):
+                    #             self.arg = arg
+                    #         def __call__(self, wrapped, instance,
+                    #                 args, kwargs):
+                    #             return wrapped(*args, **kwargs)
+                    #
+                    #     @mydecoratorclass
+                    #     def function():
+                    #         pass
+                    #
+                    # We still need to create an instance of the class to
+                    # be used as the decorator wrapper function, but no
+                    # arguments are passed.
+
+                    target_wrapper = wrapped()
+
+                else:
+                    # In this case the decorator was applied to a normal
+                    # function, or possibly a static method of a class.
+                    #
+                    #     @decorator
+                    #     def mydecoratorfunction(wrapped, instance,
+                    #             args, kwargs):
+                    #         return wrapped(*args, **kwargs)
+                    #
+                    #     @mydecoratorfunction
+                    #     def function():
+                    #         pass
+                    #
+                    # That normal function becomes the decorator wrapper
+                    # function.
+
+                    target_wrapper = wrapper
+
+            else:
+                if isclass(instance):
+                    # In this case the decorator was applied to a class
+                    # method.
+                    #
+                    #     class myclass(object):
+                    #         @decorator
+                    #         @classmethod
+                    #         def decoratorclassmethod(cls, wrapped,
+                    #                 instance, args, kwargs):
+                    #             return wrapped(*args, **kwargs)
+                    #
+                    #     instance = myclass()
+                    #
+                    #     @instance.decoratorclassmethod
+                    #     def function():
+                    #         pass
+                    #
+                    # This one is a bit strange because binding was actually
+                    # performed on the wrapper created by our decorator
+                    # factory. We need to apply that binding to the decorator
+                    # wrapper function to which the decorator factory
+                    # was applied.
+
+                    target_wrapper = wrapper.__get__(None, instance)
+
+                else:
+                    # In this case the decorator was applied to an instance
+                    # method.
+                    #
+                    #     class myclass(object):
+                    #         @decorator
+                    #         def decoratorinstancemethod(self, wrapped,
+                    #                 instance, args, kwargs):
+                    #             return wrapped(*args, **kwargs)
+                    #
+                    #     instance = myclass()
+                    #
+                    #     @instance.decoratorinstancemethod
+                    #     def function():
+                    #         pass
+                    #
+                    # This one is a bit strange because binding was actually
+                    # performed on the wrapper created by our decorator
+                    # factory. We need to apply that binding to the decorator
+                    # wrapper function to which the decorator factory
+                    # was applied.
+
+                    target_wrapper = wrapper.__get__(instance, type(instance))
+
+            # Finally build the wrapper itself and return it.
+
+            return _build(target_wrapped, target_wrapper, _enabled, adapter)
+
+        # We first return our magic function wrapper here so we can
+        # determine in what context the decorator factory was used. In
+        # other words, it is itself a universal decorator. The decorator
+        # function is used as the adapter so that linters see a signature
+        # corresponding to the decorator and not the wrapper it is being
+        # applied to.
+
+        return _build(wrapper, _wrapper, adapter=decorator)
+
+    else:
+        # The wrapper still has not been provided, so we are just
+        # collecting the optional keyword arguments. Return the
+        # decorator again wrapped in a partial using the collected
+        # arguments.
+
+        return partial(decorator, enabled=enabled, adapter=adapter)
+
+# Decorator for implementing thread synchronization. It can be used as a
+# decorator, in which case the synchronization context is determined by
+# what type of function is wrapped, or it can also be used as a context
+# manager, where the user needs to supply the correct synchronization
+# context. It is also possible to supply an object which appears to be a
+# synchronization primitive of some sort, by virtue of having release()
+# and acquire() methods. In that case that will be used directly as the
+# synchronization primitive without creating a separate lock against the
+# derived or supplied context.
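+#
+# As an illustrative usage sketch ('lock' and 'obj' stand in for objects
+# supplied by the caller):
+#
+#     @synchronized                  # lock created against the function
+#     def function():
+#         pass
+#
+#     @synchronized(lock)            # an existing lock object is supplied
+#     def function():
+#         pass
+#
+#     with synchronized(obj):        # used directly as a context manager
+#         pass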
+
+def synchronized(wrapped):
+    # Determine if being passed an object which is a synchronization
+    # primitive. We can't check by type for Lock, RLock, Semaphore etc,
+    # as the factory functions which create them aren't types. Therefore use the
+    # existence of acquire() and release() methods. This is more
+    # extensible anyway as it allows custom synchronization mechanisms.
+
+    if hasattr(wrapped, 'acquire') and hasattr(wrapped, 'release'):
+        # We remember what the original lock is and then return a new
+        # decorator which accesses and locks it. When returning the new
+        # decorator we wrap it with an object proxy so we can override
+        # the context manager methods in case it is being used to wrap
+        # synchronized statements with a 'with' statement.
+
+        lock = wrapped
+
+        @decorator
+        def _synchronized(wrapped, instance, args, kwargs):
+            # Execute the wrapped function while the original supplied
+            # lock is held.
+
+            with lock:
+                return wrapped(*args, **kwargs)
+
+        class _PartialDecorator(CallableObjectProxy):
+
+            def __enter__(self):
+                lock.acquire()
+                return lock
+
+            def __exit__(self, *args):
+                lock.release()
+
+        return _PartialDecorator(wrapped=_synchronized)
+
+    # The following only applies when the lock is being created automatically
+    # based on the context of what was supplied. In this case we supply
+    # a final decorator, but need to use FunctionWrapper directly as we
+    # want to derive from it to add context manager methods in case it is
+    # being used to wrap synchronized statements with a 'with' statement.
+
+    def _synchronized_lock(context):
+        # Attempt to retrieve the lock for the specific context.
+
+        lock = vars(context).get('_synchronized_lock', None)
+
+        if lock is None:
+            # There is no existing lock defined for the context we
+            # are dealing with so we need to create one. This needs
+            # to be done in a way to guarantee there is only one
+            # created, even if multiple threads try and create it at
+            # the same time. We can't always use the setdefault()
+            # method on the __dict__ for the context. This is the
+            # case where the context is a class, as __dict__ is
+            # actually a dictproxy. What we therefore do is use a
+            # meta lock on this wrapper itself, to control the
+            # creation and assignment of the lock attribute against
+            # the context.
+
+            with synchronized._synchronized_meta_lock:
+                # We need to check again for whether the lock we want
+                # exists in case two threads were trying to create it
+                # at the same time and were competing to create the
+                # meta lock.
+
+                lock = vars(context).get('_synchronized_lock', None)
+
+                if lock is None:
+                    lock = RLock()
+                    setattr(context, '_synchronized_lock', lock)
+
+        return lock
+
+    def _synchronized_wrapper(wrapped, instance, args, kwargs):
+        # Execute the wrapped function while the lock for the
+        # desired context is held. If instance is None then the
+        # wrapped function is used as the context.
+
+        with _synchronized_lock(instance if instance is not None else wrapped):
+            return wrapped(*args, **kwargs)
+
+    class _FinalDecorator(FunctionWrapper):
+
+        def __enter__(self):
+            self._self_lock = _synchronized_lock(self.__wrapped__)
+            self._self_lock.acquire()
+            return self._self_lock
+
+        def __exit__(self, *args):
+            self._self_lock.release()
+
+    return _FinalDecorator(wrapped=wrapped, wrapper=_synchronized_wrapper)
+
+synchronized._synchronized_meta_lock = Lock()
diff --git a/venv/lib/python3.7/site-packages/wrapt/importer.py b/venv/lib/python3.7/site-packages/wrapt/importer.py
new file mode 100644
index 00000000..4665f386
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wrapt/importer.py
@@ -0,0 +1,230 @@
+"""This module implements a post import hook mechanism styled after what is
+described in PEP-369. Note that it doesn't cope with modules being reloaded.
+
+"""
+
+import sys
+import threading
+
+PY2 = sys.version_info[0] == 2
+
+if PY2:
+    string_types = basestring,
+else:
+    import importlib
+    string_types = str,
+
+from .decorators import synchronized
+
+# The dictionary registering any post import hooks to be triggered once
+# the target module has been imported. Once a module has been imported
+# and the hooks fired, the list of hooks recorded against the target
+# module will be truncated but the list left in the dictionary. This
+# acts as a flag to indicate that the module had already been imported.
+
+_post_import_hooks = {}
+_post_import_hooks_init = False
+_post_import_hooks_lock = threading.RLock()
+
+# Register a new post import hook for the target module name. This
+# differs from the PEP-369 implementation in that it also allows the
+# hook function to be specified as a string consisting of the name of
+# the callback in the form 'module:function'. This will result in a
+# proxy callback being registered which will defer loading of the
+# specified module containing the callback function until required.
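+#
+# As an illustrative sketch ('mypackage.hooks' and 'on_requests_import'
+# are hypothetical names):
+#
+#     register_post_import_hook('mypackage.hooks:on_requests_import',
+#             'requests')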
+
+def _create_import_hook_from_string(name):
+    def import_hook(module):
+        module_name, function = name.split(':')
+        attrs = function.split('.')
+        __import__(module_name)
+        callback = sys.modules[module_name]
+        for attr in attrs:
+            callback = getattr(callback, attr)
+        return callback(module)
+    return import_hook
+
+@synchronized(_post_import_hooks_lock)
+def register_post_import_hook(hook, name):
+    # Create a deferred import hook if hook is a string name rather than
+    # a callable function.
+
+    if isinstance(hook, string_types):
+        hook = _create_import_hook_from_string(hook)
+
+    # Automatically install the import hook finder if it has not already
+    # been installed.
+
+    global _post_import_hooks_init
+
+    if not _post_import_hooks_init:
+        _post_import_hooks_init = True
+        sys.meta_path.insert(0, ImportHookFinder())
+
+    # Determine if any prior registration of a post import hook for
+    # the target module has occurred and act appropriately.
+
+    hooks = _post_import_hooks.get(name, None)
+
+    if hooks is None:
+        # No prior registration of post import hooks for the target
+        # module. We need to check whether the module has already been
+        # imported. If it has we fire the hook immediately and add an
+        # empty list to the registry to indicate that the module has
+        # already been imported and hooks have fired. Otherwise add
+        # the post import hook to the registry.
+
+        module = sys.modules.get(name, None)
+
+        if module is not None:
+            _post_import_hooks[name] = []
+            hook(module)
+
+        else:
+            _post_import_hooks[name] = [hook]
+
+    elif hooks == []:
+        # A prior registration of post import hooks for the target
+        # module was done and the hooks already fired. Fire the hook
+        # immediately.
+
+        module = sys.modules[name]
+        hook(module)
+
+    else:
+        # A prior registration of post import hooks for the target
+        # module was done but the module has not yet been imported.
+
+        _post_import_hooks[name].append(hook)
+
+# Register post import hooks defined as package entry points.
+
+def _create_import_hook_from_entrypoint(entrypoint):
+    def import_hook(module):
+        __import__(entrypoint.module_name)
+        callback = sys.modules[entrypoint.module_name]
+        for attr in entrypoint.attrs:
+            callback = getattr(callback, attr)
+        return callback(module)
+    return import_hook
+
+def discover_post_import_hooks(group):
+    try:
+        import pkg_resources
+    except ImportError:
+        return
+
+    for entrypoint in pkg_resources.iter_entry_points(group=group):
+        callback = _create_import_hook_from_entrypoint(entrypoint)
+        register_post_import_hook(callback, entrypoint.name)
+
+# Indicate that a module has been loaded. Any post import hooks which
+# were registered against the target module will be invoked. If an
+# exception is raised in any of the post import hooks, that will cause
+# the import of the target module to fail.
+
+@synchronized(_post_import_hooks_lock)
+def notify_module_loaded(module):
+    name = getattr(module, '__name__', None)
+    hooks = _post_import_hooks.get(name, None)
+
+    if hooks:
+        _post_import_hooks[name] = []
+
+        for hook in hooks:
+            hook(module)
+
+# A custom module import finder. This intercepts attempts to import
+# modules and watches out for attempts to import target modules of
+# interest. When a module of interest is imported, then any post import
+# hooks which are registered will be invoked.
+
+class _ImportHookLoader:
+
+    def load_module(self, fullname):
+        module = sys.modules[fullname]
+        notify_module_loaded(module)
+
+        return module
+
+class _ImportHookChainedLoader:
+
+    def __init__(self, loader):
+        self.loader = loader
+
+    def load_module(self, fullname):
+        module = self.loader.load_module(fullname)
+        notify_module_loaded(module)
+
+        return module
+
+class ImportHookFinder:
+
+    def __init__(self):
+        self.in_progress = {}
+
+    @synchronized(_post_import_hooks_lock)
+    def find_module(self, fullname, path=None):
+        # If the module being imported is not one we have registered
+        # post import hooks for, we can return immediately. We will
+        # take no further part in the importing of this module.
+
+        if fullname not in _post_import_hooks:
+            return None
+
+        # When we are interested in a specific module, we will call back
+        # into the import system a second time to defer to the import
+        # finder that is supposed to handle the importing of the module.
+        # We set an in progress flag for the target module so that on
+        # the second time through we don't trigger another call back
+        # into the import system and cause an infinite loop.
+
+        if fullname in self.in_progress:
+            return None
+
+        self.in_progress[fullname] = True
+
+        # Now call back into the import system again.
+
+        try:
+            if PY2:
+                # For Python 2 we don't have much choice but to
+                # call back into __import__(). This will
+                # actually cause the module to be imported. If no
+                # module could be found then ImportError will be
+                # raised. Otherwise we return a loader which
+                # returns the already loaded module and invokes
+                # the post import hooks.
+
+                __import__(fullname)
+
+                return _ImportHookLoader()
+
+            else:
+                # For Python 3 we need to use find_spec().loader
+                # from the importlib.util module. It doesn't actually
+                # import the target module and only finds the
+                # loader. If a loader is found, we need to return
+                # our own loader which will then in turn call the
+                # real loader to import the module and invoke the
+                # post import hooks.
+                try:
+                    import importlib.util
+                    loader = importlib.util.find_spec(fullname).loader
+                except (ImportError, AttributeError):
+                    loader = importlib.find_loader(fullname, path)
+                if loader:
+                    return _ImportHookChainedLoader(loader)
+
+        finally:
+            del self.in_progress[fullname]
+
+# Decorator for marking that a function should be called as a post
+# import hook when the target module is imported.
+
+def when_imported(name):
+    def register(hook):
+        register_post_import_hook(hook, name)
+        return hook
+    return register
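+
+# A minimal usage sketch (module and function names are illustrative
+# only): the decorated function runs once the target module has been
+# imported.
+#
+#     @when_imported('socket')
+#     def _announce(module):
+#         print('loaded:', module.__name__)
+#
+#     import socket    # the hook above fires at this point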
diff --git a/venv/lib/python3.7/site-packages/wrapt/wrappers.py b/venv/lib/python3.7/site-packages/wrapt/wrappers.py
new file mode 100644
index 00000000..18cf5e05
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/wrapt/wrappers.py
@@ -0,0 +1,947 @@
+import os
+import sys
+import functools
+import operator
+import weakref
+import inspect
+
+PY2 = sys.version_info[0] == 2
+
+if PY2:
+    string_types = basestring,
+else:
+    string_types = str,
+
+def with_metaclass(meta, *bases):
+    """Create a base class with a metaclass."""
+    return meta("NewBase", bases, {})
+
+class _ObjectProxyMethods(object):
+
+    # We use properties to override the values of __module__ and
+    # __doc__. If we add these in ObjectProxy, the derived class
+    # __dict__ will still be setup to have string variants of these
+    # attributes and the rules of descriptors means that they appear to
+    # take precedence over the properties in the base class. To avoid
+    # that, we copy the properties into the derived class type itself
+    # via a meta class. In that way the properties will always take
+    # precedence.
+
+    @property
+    def __module__(self):
+        return self.__wrapped__.__module__
+
+    @__module__.setter
+    def __module__(self, value):
+        self.__wrapped__.__module__ = value
+
+    @property
+    def __doc__(self):
+        return self.__wrapped__.__doc__
+
+    @__doc__.setter
+    def __doc__(self, value):
+        self.__wrapped__.__doc__ = value
+
+    # We similarly use a property for __dict__. We need __dict__ to be
+    # explicit to ensure that vars() works as expected.
+
+    @property
+    def __dict__(self):
+        return self.__wrapped__.__dict__
+
+    # We also need to propagate the special __weakref__ attribute for the
+    # case where we are decorating classes which will define it. If we do
+    # not define it and a function like inspect.getmembers() is used on a
+    # decorator class, it will fail. This can't be in the derived classes.
+
+    @property
+    def __weakref__(self):
+        return self.__wrapped__.__weakref__
+
+class _ObjectProxyMetaType(type):
+    def __new__(cls, name, bases, dictionary):
+        # Copy our special properties into the class so that they
+        # always take precedence over attributes of the same name added
+        # during construction of a derived class. This is to save
+        # duplicating the implementation for them in all derived classes.
+
+        dictionary.update(vars(_ObjectProxyMethods))
+
+        return type.__new__(cls, name, bases, dictionary)
+
+class ObjectProxy(with_metaclass(_ObjectProxyMetaType)):
+
+    __slots__ = '__wrapped__'
+
+    def __init__(self, wrapped):
+        object.__setattr__(self, '__wrapped__', wrapped)
+
+        # Python 3.2+ has the __qualname__ attribute, but it does not
+        # allow it to be overridden using a property and it must instead
+        # be an actual string object.
+
+        try:
+            object.__setattr__(self, '__qualname__', wrapped.__qualname__)
+        except AttributeError:
+            pass
+
+    @property
+    def __name__(self):
+        return self.__wrapped__.__name__
+
+    @__name__.setter
+    def __name__(self, value):
+        self.__wrapped__.__name__ = value
+
+    @property
+    def __class__(self):
+        return self.__wrapped__.__class__
+
+    @__class__.setter
+    def __class__(self, value):
+        self.__wrapped__.__class__ = value
+
+    @property
+    def __annotations__(self):
+        return self.__wrapped__.__annotations__
+
+    @__annotations__.setter
+    def __annotations__(self, value):
+        self.__wrapped__.__annotations__ = value
+
+    def __dir__(self):
+        return dir(self.__wrapped__)
+
+    def __str__(self):
+        return str(self.__wrapped__)
+
+    if not PY2:
+        def __bytes__(self):
+            return bytes(self.__wrapped__)
+
+    def __repr__(self):
+        return '<{} at 0x{:x} for {} at 0x{:x}>'.format(
+                type(self).__name__, id(self),
+                type(self.__wrapped__).__name__,
+                id(self.__wrapped__))
+
+    def __reversed__(self):
+        return reversed(self.__wrapped__)
+
+    if not PY2:
+        def __round__(self):
+            return round(self.__wrapped__)
+
+    if sys.hexversion >= 0x03070000:
+        def __mro_entries__(self, bases):
+            return (self.__wrapped__,)
+
+    def __lt__(self, other):
+        return self.__wrapped__ < other
+
+    def __le__(self, other):
+        return self.__wrapped__ <= other
+
+    def __eq__(self, other):
+        return self.__wrapped__ == other
+
+    def __ne__(self, other):
+        return self.__wrapped__ != other
+
+    def __gt__(self, other):
+        return self.__wrapped__ > other
+
+    def __ge__(self, other):
+        return self.__wrapped__ >= other
+
+    def __hash__(self):
+        return hash(self.__wrapped__)
+
+    def __nonzero__(self):
+        return bool(self.__wrapped__)
+
+    def __bool__(self):
+        return bool(self.__wrapped__)
+
+    def __setattr__(self, name, value):
+        if name.startswith('_self_'):
+            object.__setattr__(self, name, value)
+
+        elif name == '__wrapped__':
+            object.__setattr__(self, name, value)
+            try:
+                object.__delattr__(self, '__qualname__')
+            except AttributeError:
+                pass
+            try:
+                object.__setattr__(self, '__qualname__', value.__qualname__)
+            except AttributeError:
+                pass
+
+        elif name == '__qualname__':
+            setattr(self.__wrapped__, name, value)
+            object.__setattr__(self, name, value)
+
+        elif hasattr(type(self), name):
+            object.__setattr__(self, name, value)
+
+        else:
+            setattr(self.__wrapped__, name, value)
+
+    def __getattr__(self, name):
+        # If we are being asked to look up '__wrapped__' then the
+        # '__init__()' method cannot have been called.
+
+        if name == '__wrapped__':
+            raise ValueError('wrapper has not been initialised')
+
+        return getattr(self.__wrapped__, name)
+
+    def __delattr__(self, name):
+        if name.startswith('_self_'):
+            object.__delattr__(self, name)
+
+        elif name == '__wrapped__':
+            raise TypeError('__wrapped__ must be an object')
+
+        elif name == '__qualname__':
+            object.__delattr__(self, name)
+            delattr(self.__wrapped__, name)
+
+        elif hasattr(type(self), name):
+            object.__delattr__(self, name)
+
+        else:
+            delattr(self.__wrapped__, name)
+
+    def __add__(self, other):
+        return self.__wrapped__ + other
+
+    def __sub__(self, other):
+        return self.__wrapped__ - other
+
+    def __mul__(self, other):
+        return self.__wrapped__ * other
+
+    def __div__(self, other):
+        return operator.div(self.__wrapped__, other)
+
+    def __truediv__(self, other):
+        return operator.truediv(self.__wrapped__, other)
+
+    def __floordiv__(self, other):
+        return self.__wrapped__ // other
+
+    def __mod__(self, other):
+        return self.__wrapped__ % other
+
+    def __divmod__(self, other):
+        return divmod(self.__wrapped__, other)
+
+    def __pow__(self, other, *args):
+        return pow(self.__wrapped__, other, *args)
+
+    def __lshift__(self, other):
+        return self.__wrapped__ << other
+
+    def __rshift__(self, other):
+        return self.__wrapped__ >> other
+
+    def __and__(self, other):
+        return self.__wrapped__ & other
+
+    def __xor__(self, other):
+        return self.__wrapped__ ^ other
+
+    def __or__(self, other):
+        return self.__wrapped__ | other
+
+    def __radd__(self, other):
+        return other + self.__wrapped__
+
+    def __rsub__(self, other):
+        return other - self.__wrapped__
+
+    def __rmul__(self, other):
+        return other * self.__wrapped__
+
+    def __rdiv__(self, other):
+        return operator.div(other, self.__wrapped__)
+
+    def __rtruediv__(self, other):
+        return operator.truediv(other, self.__wrapped__)
+
+    def __rfloordiv__(self, other):
+        return other // self.__wrapped__
+
+    def __rmod__(self, other):
+        return other % self.__wrapped__
+
+    def __rdivmod__(self, other):
+        return divmod(other, self.__wrapped__)
+
+    def __rpow__(self, other, *args):
+        return pow(other, self.__wrapped__, *args)
+
+    def __rlshift__(self, other):
+        return other << self.__wrapped__
+
+    def __rrshift__(self, other):
+        return other >> self.__wrapped__
+
+    def __rand__(self, other):
+        return other & self.__wrapped__
+
+    def __rxor__(self, other):
+        return other ^ self.__wrapped__
+
+    def __ror__(self, other):
+        return other | self.__wrapped__
+
+    def __iadd__(self, other):
+        self.__wrapped__ += other
+        return self
+
+    def __isub__(self, other):
+        self.__wrapped__ -= other
+        return self
+
+    def __imul__(self, other):
+        self.__wrapped__ *= other
+        return self
+
+    def __idiv__(self, other):
+        self.__wrapped__ = operator.idiv(self.__wrapped__, other)
+        return self
+
+    def __itruediv__(self, other):
+        self.__wrapped__ = operator.itruediv(self.__wrapped__, other)
+        return self
+
+    def __ifloordiv__(self, other):
+        self.__wrapped__ //= other
+        return self
+
+    def __imod__(self, other):
+        self.__wrapped__ %= other
+        return self
+
+    def __ipow__(self, other):
+        self.__wrapped__ **= other
+        return self
+
+    def __ilshift__(self, other):
+        self.__wrapped__ <<= other
+        return self
+
+    def __irshift__(self, other):
+        self.__wrapped__ >>= other
+        return self
+
+    def __iand__(self, other):
+        self.__wrapped__ &= other
+        return self
+
+    def __ixor__(self, other):
+        self.__wrapped__ ^= other
+        return self
+
+    def __ior__(self, other):
+        self.__wrapped__ |= other
+        return self
+
+    def __neg__(self):
+        return -self.__wrapped__
+
+    def __pos__(self):
+        return +self.__wrapped__
+
+    def __abs__(self):
+        return abs(self.__wrapped__)
+
+    def __invert__(self):
+        return ~self.__wrapped__
+
+    def __int__(self):
+        return int(self.__wrapped__)
+
+    def __long__(self):
+        return long(self.__wrapped__)
+
+    def __float__(self):
+        return float(self.__wrapped__)
+
+    def __complex__(self):
+        return complex(self.__wrapped__)
+
+    def __oct__(self):
+        return oct(self.__wrapped__)
+
+    def __hex__(self):
+        return hex(self.__wrapped__)
+
+    def __index__(self):
+        return operator.index(self.__wrapped__)
+
+    def __len__(self):
+        return len(self.__wrapped__)
+
+    def __contains__(self, value):
+        return value in self.__wrapped__
+
+    def __getitem__(self, key):
+        return self.__wrapped__[key]
+
+    def __setitem__(self, key, value):
+        self.__wrapped__[key] = value
+
+    def __delitem__(self, key):
+        del self.__wrapped__[key]
+
+    def __getslice__(self, i, j):
+        return self.__wrapped__[i:j]
+
+    def __setslice__(self, i, j, value):
+        self.__wrapped__[i:j] = value
+
+    def __delslice__(self, i, j):
+        del self.__wrapped__[i:j]
+
+    def __enter__(self):
+        return self.__wrapped__.__enter__()
+
+    def __exit__(self, *args, **kwargs):
+        return self.__wrapped__.__exit__(*args, **kwargs)
+
+    def __iter__(self):
+        return iter(self.__wrapped__)
+
+    def __copy__(self):
+        raise NotImplementedError('object proxy must define __copy__()')
+
+    def __deepcopy__(self, memo):
+        raise NotImplementedError('object proxy must define __deepcopy__()')
+
+    def __reduce__(self):
+        raise NotImplementedError(
+                'object proxy must define __reduce_ex__()')
+
+    def __reduce_ex__(self, protocol):
+        raise NotImplementedError(
+                'object proxy must define __reduce_ex__()')
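+
+# Usage sketch (illustrative names): attributes prefixed with '_self_'
+# are stored on the proxy itself by __setattr__() above, while all other
+# attribute access is forwarded to the wrapped object.
+#
+#     class CountingProxy(ObjectProxy):
+#         def __init__(self, wrapped):
+#             super(CountingProxy, self).__init__(wrapped)
+#             self._self_count = 0      # lives on the proxy, not the list
+#
+#     p = CountingProxy([1, 2, 3])
+#     p.append(4)                       # forwarded to the wrapped list
+#     assert len(p) == 4 and p._self_count == 0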
+
+class CallableObjectProxy(ObjectProxy):
+
+    def __call__(self, *args, **kwargs):
+        return self.__wrapped__(*args, **kwargs)
+
+class PartialCallableObjectProxy(ObjectProxy):
+
+    def __init__(self, *args, **kwargs):
+        if len(args) < 1:
+            raise TypeError('partial type takes at least one argument')
+
+        wrapped, args = args[0], args[1:]
+
+        if not callable(wrapped):
+            raise TypeError('the first argument must be callable')
+
+        super(PartialCallableObjectProxy, self).__init__(wrapped)
+
+        self._self_args = args
+        self._self_kwargs = kwargs
+
+    def __call__(self, *args, **kwargs):
+        _args = self._self_args + args
+
+        _kwargs = dict(self._self_kwargs)
+        _kwargs.update(kwargs)
+
+        return self.__wrapped__(*_args, **_kwargs)
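+
+# Behaves like functools.partial() while remaining a transparent proxy
+# for the wrapped callable (illustrative sketch):
+#
+#     def add(a, b):
+#         return a + b
+#
+#     add_one = PartialCallableObjectProxy(add, 1)
+#     assert add_one(2) == 3 and add_one.__name__ == 'add'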
+
+class _FunctionWrapperBase(ObjectProxy):
+
+    __slots__ = ('_self_instance', '_self_wrapper', '_self_enabled',
+            '_self_binding', '_self_parent')
+
+    def __init__(self, wrapped, instance, wrapper, enabled=None,
+            binding='function', parent=None):
+
+        super(_FunctionWrapperBase, self).__init__(wrapped)
+
+        object.__setattr__(self, '_self_instance', instance)
+        object.__setattr__(self, '_self_wrapper', wrapper)
+        object.__setattr__(self, '_self_enabled', enabled)
+        object.__setattr__(self, '_self_binding', binding)
+        object.__setattr__(self, '_self_parent', parent)
+
+    def __get__(self, instance, owner):
+        # This method is actually doing double duty for both unbound and
+        # bound derived wrapper classes. It should possibly be broken up
+        # and the distinct functionality moved into the derived classes.
+        # Can't do that straight away due to some legacy code which is
+        # relying on it being here in this base class.
+        #
+        # The distinguishing attribute which determines whether we are
+        # being called in an unbound or bound wrapper is the parent
+        # attribute. If binding has never occurred, then the parent will
+        # be None.
+        #
+        # The first case, therefore, is where we are called on an unbound
+        # wrapper. In this case we perform the binding.
+        #
+        # We have one special case to worry about here. This is where we
+        # are decorating a nested class. In this case the wrapped class
+        # would not have a __get__() method to call. In that case we
+        # simply return self.
+        #
+        # Note that we otherwise still do binding even if instance is
+        # None and accessing an unbound instance method from a class.
+        # This is because we need to be able to later detect that
+        # specific case as we will need to extract the instance from the
+        # first argument of those passed in.
+
+        if self._self_parent is None:
+            if not inspect.isclass(self.__wrapped__):
+                descriptor = self.__wrapped__.__get__(instance, owner)
+
+                return self.__bound_function_wrapper__(descriptor, instance,
+                        self._self_wrapper, self._self_enabled,
+                        self._self_binding, self)
+
+            return self
+
+        # Now we have the case of binding occurring a second time on what
+        # was already a bound function. In this case we would usually
+        # return ourselves again. This mirrors what Python does.
+        #
+        # The special case this time is where we were originally bound
+        # with an instance of None and we were likely an instance
+        # method. In that case we rebind against the original wrapped
+        # function from the parent again.
+
+        if self._self_instance is None and self._self_binding == 'function':
+            descriptor = self._self_parent.__wrapped__.__get__(
+                    instance, owner)
+
+            return self._self_parent.__bound_function_wrapper__(
+                    descriptor, instance, self._self_wrapper,
+                    self._self_enabled, self._self_binding,
+                    self._self_parent)
+
+        return self
+
+    def __call__(self, *args, **kwargs):
+        # If enabled has been specified, then evaluate it at this point
+        # and if the wrapper is not to be executed, then simply return
+        # the bound function rather than a bound wrapper for the bound
+        # function. When evaluating enabled, if it is callable we call
+        # it, otherwise we evaluate it as a boolean.
+
+        if self._self_enabled is not None:
+            if callable(self._self_enabled):
+                if not self._self_enabled():
+                    return self.__wrapped__(*args, **kwargs)
+            elif not self._self_enabled:
+                return self.__wrapped__(*args, **kwargs)
+
+        # This can occur where the initial function wrapper was applied to
+        # a function that was already bound to an instance. In that case
+        # we want to extract the instance from the function and use it.
+
+        if self._self_binding == 'function':
+            if self._self_instance is None:
+                instance = getattr(self.__wrapped__, '__self__', None)
+                if instance is not None:
+                    return self._self_wrapper(self.__wrapped__, instance,
+                            args, kwargs)
+
+        # This is generally invoked when the wrapped function is being
+        # called as a normal function and is not bound to a class as an
+        # instance method. This is also invoked in the case where the
+        # wrapped function was a method, but this wrapper was in turn
+        # wrapped using the staticmethod decorator.
+
+        return self._self_wrapper(self.__wrapped__, self._self_instance,
+                args, kwargs)
+
+class BoundFunctionWrapper(_FunctionWrapperBase):
+
+    def __call__(self, *args, **kwargs):
+        # If enabled has been specified, then evaluate it at this point
+        # and if the wrapper is not to be executed, then simply return
+        # the bound function rather than a bound wrapper for the bound
+        # function. When evaluating enabled, if it is callable we call
+        # it, otherwise we evaluate it as a boolean.
+
+        if self._self_enabled is not None:
+            if callable(self._self_enabled):
+                if not self._self_enabled():
+                    return self.__wrapped__(*args, **kwargs)
+            elif not self._self_enabled:
+                return self.__wrapped__(*args, **kwargs)
+
+        # We need to do things differently depending on whether we are
+        # likely wrapping an instance method vs a static method or class
+        # method.
+
+        if self._self_binding == 'function':
+            if self._self_instance is None:
+                # This situation can occur where someone is calling the
+                # instancemethod via the class type and passing the instance
+                # as the first argument. We need to shift the args before
+                # making the call to the wrapper and effectively bind the
+                # instance to the wrapped function using a partial so the
+                # wrapper doesn't see anything as being different.
+
+                if not args:
+                    raise TypeError('missing 1 required positional argument')
+
+                instance, args = args[0], args[1:]
+                wrapped = PartialCallableObjectProxy(self.__wrapped__, instance)
+                return self._self_wrapper(wrapped, instance, args, kwargs)
+
+            return self._self_wrapper(self.__wrapped__, self._self_instance,
+                    args, kwargs)
+
+        else:
+            # As in this case we would be dealing with a classmethod or
+            # staticmethod, _self_instance will only tell us whether the
+            # classmethod or staticmethod was called via an instance of the
+            # class it is bound to, and not the case where it was called
+            # via the class type itself. We thus ignore _self_instance and
+            # use the __self__ attribute of the bound function instead.
+            # For a classmethod, this means instance will be the class type
+            # and for a staticmethod it will be None. This is probably the
+            # more useful thing we can pass through even though we lose
+            # knowledge of whether they were called on the instance vs the
+            # class type, as it reflects what they have available in the
+            # decorated function.
+
+            instance = getattr(self.__wrapped__, '__self__', None)
+
+            return self._self_wrapper(self.__wrapped__, instance, args,
+                    kwargs)
+
+class FunctionWrapper(_FunctionWrapperBase):
+
+    __bound_function_wrapper__ = BoundFunctionWrapper
+
+    def __init__(self, wrapped, wrapper, enabled=None):
+        # What it is we are wrapping here could be anything. We need to
+        # try and detect specific cases though. In particular, we need
+        # to detect when we are given something that is a method of a
+        # class. Further, we need to know when it is likely an instance
+        # method, as opposed to a class or static method. This can
+        # become problematic though as there isn't strictly a foolproof
+        # method of knowing.
+        #
+        # The situations we could encounter when wrapping a method are:
+        #
+        # 1. The wrapper is being applied as part of a decorator which
+        # is a part of the class definition. In this case what we are
+        # given is the raw unbound function, classmethod or staticmethod
+        # wrapper objects.
+        #
+        # The problem here is that we will not know we are being applied
+        # in the context of the class being set up. This becomes
+        # important later for the case of an instance method, because in
+        # that case we just see it as a raw function and can't
+        # distinguish it from wrapping a normal function outside of
+        # a class context.
+        #
+        # 2. The wrapper is being applied when performing monkey
+        # patching of the class type afterwards and the method to be
+        # wrapped was retrieved direct from the __dict__ of the class
+        # type. This is effectively the same as (1) above.
+        #
+        # 3. The wrapper is being applied when performing monkey
+        # patching of the class type afterwards and the method to be
+        # wrapped was retrieved from the class type. In this case
+        # binding will have been performed where the instance against
+        # which the method is bound will be None at that point.
+        #
+        # This case is a problem because we can no longer tell if the
+        # method was a static method, plus if using Python 3, we cannot
+        # tell if it was an instance method as the concept of an
+        # unbound method no longer exists.
+        #
+        # 4. The wrapper is being applied when performing monkey
+        # patching of an instance of a class. In this case binding will
+        # have been performed where the instance was not None.
+        #
+        # This case is a problem because we can no longer tell if the
+        # method was a static method.
+        #
+        # Overall, the best we can do is look at the original type of the
+        # object which was wrapped prior to any binding being done and
+        # see if it is an instance of classmethod or staticmethod. In
+        # the case where other decorators are between us and them, if
+        # they do not propagate the __class__ attribute so that the
+        # isinstance() check works, then likely this will do the wrong
+        # thing where classmethod and staticmethod are used.
+        #
+        # Since it is likely to be very rare that anyone even puts
+        # decorators around classmethod and staticmethod, the likelihood
+        # of that being an issue is very small, so we accept it and suggest
+        # that those other decorators be fixed. It is also only an issue
+        # if a decorator wants to actually do things with the arguments.
+        #
+        # As to not being able to identify static methods properly, we
+        # just hope that that isn't something people are going to want
+        # to wrap, or if they do, suggest that they do it the correct way
+        # by ensuring that it is decorated in the class definition itself,
+        # or patched in via the __dict__ of the class type.
+        #
+        # So to get the best outcome we can, whenever we aren't sure what
+        # it is, we label it as a 'function'. If it was already bound and
+        # that is rebound later, we assume that it will be an instance
+        # method and try and cope with the possibility that the 'self'
+        # argument is being passed as an explicit argument and shuffle
+        # the arguments around to extract 'self' for use as the instance.
+
+        if isinstance(wrapped, classmethod):
+            binding = 'classmethod'
+
+        elif isinstance(wrapped, staticmethod):
+            binding = 'staticmethod'
+
+        elif hasattr(wrapped, '__self__'):
+            if inspect.isclass(wrapped.__self__):
+                binding = 'classmethod'
+            else:
+                binding = 'function'
+
+        else:
+            binding = 'function'
+
+        super(FunctionWrapper, self).__init__(wrapped, None, wrapper,
+                enabled, binding)
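+
+# Usage sketch (names are illustrative): the wrapper callable always
+# receives (wrapped, instance, args, kwargs).
+#
+#     def log_calls(wrapped, instance, args, kwargs):
+#         print('calling', wrapped.__name__)
+#         return wrapped(*args, **kwargs)
+#
+#     def greet(name):
+#         return 'hello ' + name
+#
+#     greet = FunctionWrapper(greet, log_calls)
+#     greet('world')    # prints 'calling greet', returns 'hello world'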
+
+try:
+    if not os.environ.get('WRAPT_DISABLE_EXTENSIONS'):
+        from ._wrappers import (ObjectProxy, CallableObjectProxy,
+            PartialCallableObjectProxy, FunctionWrapper,
+            BoundFunctionWrapper, _FunctionWrapperBase)
+except ImportError:
+    pass
+
+# Helper functions for applying wrappers to existing functions.
+
+def resolve_path(module, name):
+    if isinstance(module, string_types):
+        __import__(module)
+        module = sys.modules[module]
+
+    parent = module
+
+    path = name.split('.')
+    attribute = path[0]
+
+    # We can't just always use getattr() because in doing
+    # that on a class it will cause binding to occur which
+    # will complicate things later and cause some things not
+    # to work. For the case of a class we therefore access
+    # the __dict__ directly. To cope though with the wrong
+    # class being given to us, or a method being moved into
+    # a base class, we need to walk the class hierarchy to
+    # work out exactly which __dict__ the method was defined
+    # in, as accessing it from __dict__ will fail if it was
+    # not actually on the class given. Fall back to using
+    # getattr() if we can't find it. If it truly doesn't
+    # exist, then that will fail.
+
+    def lookup_attribute(parent, attribute):
+        if inspect.isclass(parent):
+            for cls in inspect.getmro(parent):
+                if attribute in vars(cls):
+                    return vars(cls)[attribute]
+            else:
+                return getattr(parent, attribute)
+        else:
+            return getattr(parent, attribute)
+
+    original = lookup_attribute(parent, attribute)
+
+    for attribute in path[1:]:
+        parent = original
+        original = lookup_attribute(parent, attribute)
+
+    return (parent, attribute, original)
+
+def apply_patch(parent, attribute, replacement):
+    setattr(parent, attribute, replacement)
+
+def wrap_object(module, name, factory, args=(), kwargs={}):
+    (parent, attribute, original) = resolve_path(module, name)
+    wrapper = factory(original, *args, **kwargs)
+    apply_patch(parent, attribute, wrapper)
+    return wrapper
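+
+# For example (sketch; module and attribute names are illustrative),
+#
+#     wrap_object('mypkg.mymod', 'MyClass.method', FunctionWrapper,
+#             (my_wrapper,))
+#
+# resolves 'MyClass.method' within 'mypkg.mymod', builds
+# FunctionWrapper(original, my_wrapper) and patches it back in place.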
+
+# Function for applying a proxy object to an attribute of a class
+# instance. The wrapper works by defining an attribute of the same name
+# on the class which is a descriptor and which intercepts access to the
+# instance attribute. Note that this cannot be used on attributes which
+# are themselves defined by a property object.
+
+class AttributeWrapper(object):
+
+    def __init__(self, attribute, factory, args, kwargs):
+        self.attribute = attribute
+        self.factory = factory
+        self.args = args
+        self.kwargs = kwargs
+
+    def __get__(self, instance, owner):
+        value = instance.__dict__[self.attribute]
+        return self.factory(value, *self.args, **self.kwargs)
+
+    def __set__(self, instance, value):
+        instance.__dict__[self.attribute] = value
+
+    def __delete__(self, instance):
+        del instance.__dict__[self.attribute]
+
+def wrap_object_attribute(module, name, factory, args=(), kwargs={}):
+    path, attribute = name.rsplit('.', 1)
+    parent = resolve_path(module, path)[2]
+    wrapper = AttributeWrapper(attribute, factory, args, kwargs)
+    apply_patch(parent, attribute, wrapper)
+    return wrapper
+
+# Functions for creating a simple decorator using a FunctionWrapper,
+# plus short cut functions for applying wrappers to functions. These are
+# for use when doing monkey patching. For a more featured way of
+# creating decorators see the decorator decorator instead.
+
+def function_wrapper(wrapper):
+    def _wrapper(wrapped, instance, args, kwargs):
+        target_wrapped = args[0]
+        if instance is None:
+            target_wrapper = wrapper
+        elif inspect.isclass(instance):
+            target_wrapper = wrapper.__get__(None, instance)
+        else:
+            target_wrapper = wrapper.__get__(instance, type(instance))
+        return FunctionWrapper(target_wrapped, target_wrapper)
+    return FunctionWrapper(wrapper, _wrapper)
+
+def wrap_function_wrapper(module, name, wrapper):
+    return wrap_object(module, name, FunctionWrapper, (wrapper,))
+
+def patch_function_wrapper(module, name):
+    def _wrapper(wrapper):
+        return wrap_object(module, name, FunctionWrapper, (wrapper,))
+    return _wrapper
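+
+# The decorator form is equivalent to calling wrap_function_wrapper()
+# directly (sketch; names are illustrative):
+#
+#     @patch_function_wrapper('mypkg.mymod', 'function')
+#     def my_wrapper(wrapped, instance, args, kwargs):
+#         return wrapped(*args, **kwargs)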
+
+def transient_function_wrapper(module, name):
+    def _decorator(wrapper):
+        def _wrapper(wrapped, instance, args, kwargs):
+            target_wrapped = args[0]
+            if instance is None:
+                target_wrapper = wrapper
+            elif inspect.isclass(instance):
+                target_wrapper = wrapper.__get__(None, instance)
+            else:
+                target_wrapper = wrapper.__get__(instance, type(instance))
+            def _execute(wrapped, instance, args, kwargs):
+                (parent, attribute, original) = resolve_path(module, name)
+                replacement = FunctionWrapper(original, target_wrapper)
+                setattr(parent, attribute, replacement)
+                try:
+                    return wrapped(*args, **kwargs)
+                finally:
+                    setattr(parent, attribute, original)
+            return FunctionWrapper(target_wrapped, _execute)
+        return FunctionWrapper(wrapper, _wrapper)
+    return _decorator
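+
+# Unlike patch_function_wrapper(), the patch applied here only exists
+# for the duration of a call to the decorated function, which is mainly
+# useful in test code (sketch; names are illustrative):
+#
+#     @transient_function_wrapper('mypkg.mymod', 'function')
+#     def capture_args(wrapped, instance, args, kwargs):
+#         seen.append(args)
+#         return wrapped(*args, **kwargs)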
+
+# A weak function proxy. This will work on instance methods, class
+# methods, static methods and regular functions. Special treatment is
+# needed for the method types because the bound method is effectively a
+# transient object and applying a weak reference to one will immediately
+# result in it being destroyed and the weakref callback called. The weak
+# reference is therefore applied to the instance the method is bound to
+# and the original function. The function is then rebound at the point
+# of a call via the weak function proxy.
+
+def _weak_function_proxy_callback(ref, proxy, callback):
+    if proxy._self_expired:
+        return
+
+    proxy._self_expired = True
+
+    # This could raise an exception. We let it propagate back and let
+    # the weakref.proxy() deal with it, at which point it generally
+    # prints out a short error message direct to stderr and keeps going.
+
+    if callback is not None:
+        callback(proxy)
+
+class WeakFunctionProxy(ObjectProxy):
+
+    __slots__ = ('_self_expired', '_self_instance')
+
+    def __init__(self, wrapped, callback=None):
+        # We need to determine if the wrapped function is actually a
+        # bound method. In the case of a bound method, we need to keep a
+        # reference to the original unbound function and the instance.
+        # This is necessary because if we hold a reference to the bound
+        # function, it will be the only reference and given it is a
+        # temporary object, it will almost immediately expire and
+        # the weakref callback will be triggered. So what is done is that
+        # we hold a reference to the instance and unbound function and,
+        # when called, bind the function to the instance once again and
+        # then call it. Note that we avoid using a nested function for
+        # the callback here so as not to cause any odd reference cycles.
+
+        _callback = callback and functools.partial(
+                _weak_function_proxy_callback, proxy=self,
+                callback=callback)
+
+        self._self_expired = False
+
+        if isinstance(wrapped, _FunctionWrapperBase):
+            self._self_instance = weakref.ref(wrapped._self_instance,
+                    _callback)
+
+            if wrapped._self_parent is not None:
+                super(WeakFunctionProxy, self).__init__(
+                        weakref.proxy(wrapped._self_parent, _callback))
+
+            else:
+                super(WeakFunctionProxy, self).__init__(
+                        weakref.proxy(wrapped, _callback))
+
+            return
+
+        try:
+            self._self_instance = weakref.ref(wrapped.__self__, _callback)
+
+            super(WeakFunctionProxy, self).__init__(
+                    weakref.proxy(wrapped.__func__, _callback))
+
+        except AttributeError:
+            self._self_instance = None
+
+            super(WeakFunctionProxy, self).__init__(
+                    weakref.proxy(wrapped, _callback))
+
+    def __call__(self, *args, **kwargs):
+        # We perform a boolean check here on the instance and wrapped
+        # function as that will trigger the reference error prior to
+        # calling if the reference had expired.
+
+        instance = self._self_instance and self._self_instance()
+        function = self.__wrapped__ and self.__wrapped__
+
+        # If the wrapped function was originally a bound function, for
+        # which we retained a reference to the instance and the unbound
+        # function, we need to rebind the function and then call it. If
+        # not, we just call the wrapped function.
+
+        if instance is None:
+            return self.__wrapped__(*args, **kwargs)
+
+        return function.__get__(instance, type(instance))(*args, **kwargs)
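+
+# Usage sketch (illustrative): a WeakFunctionProxy around a bound method
+# does not keep the instance alive, because only weak references to the
+# instance and the unbound function are held.
+#
+#     class Worker(object):
+#         def run(self):
+#             return 'ran'
+#
+#     w = Worker()
+#     proxy = WeakFunctionProxy(w.run)
+#     assert proxy() == 'ran'    # rebinds Worker.run to w on each call
+#     del w    # the proxy does not keep the instance alive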
diff --git a/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/INSTALLER b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/LICENSE b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/LICENSE
new file mode 100644
index 00000000..353924be
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/METADATA b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/METADATA
new file mode 100644
index 00000000..b54d9c71
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/METADATA
@@ -0,0 +1,54 @@
+Metadata-Version: 2.1
+Name: zipp
+Version: 3.4.1
+Summary: Backport of pathlib-compatible object wrapper for zip files
+Home-page: https://github.com/jaraco/zipp
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.6
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=4.6) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=1.2.3) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler ; extra == 'testing'
+Requires-Dist: jaraco.itertools ; extra == 'testing'
+Requires-Dist: func-timeout ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/zipp.svg
+   :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+   :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/zipp
+
+.. image:: https://github.com/jaraco/zipp/workflows/tests/badge.svg
+   :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+   :target: https://github.com/psf/black
+   :alt: Code style: Black
+
+.. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
+..    :target: https://zipp.readthedocs.io/en/latest/?badge=latest
+
+
+A pathlib-compatible Zipfile object wrapper. A backport of the
+`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
+
+
diff --git a/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/RECORD b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/RECORD
new file mode 100644
index 00000000..150c81f3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/RECORD
@@ -0,0 +1,8 @@
+__pycache__/zipp.cpython-37.pyc,,
+zipp-3.4.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+zipp-3.4.1.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+zipp-3.4.1.dist-info/METADATA,sha256=ceLXh-zF008K5aguWA5dHZ20bzsRa1kwV3heimH0GXw,2087
+zipp-3.4.1.dist-info/RECORD,,
+zipp-3.4.1.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
+zipp-3.4.1.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
+zipp.py,sha256=wMSoYxAIPgYnqJAW0JcAl5sWaIcFc5xk3dNjf6ElGgU,8089
diff --git a/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/WHEEL b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/WHEEL
new file mode 100644
index 00000000..385faab0
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/top_level.txt b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/top_level.txt
new file mode 100644
index 00000000..e82f676f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/zipp-3.4.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+zipp
diff --git a/venv/lib/python3.7/site-packages/zipp.py b/venv/lib/python3.7/site-packages/zipp.py
new file mode 100644
index 00000000..25ef06e9
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/zipp.py
@@ -0,0 +1,314 @@
+import io
+import posixpath
+import zipfile
+import itertools
+import contextlib
+import sys
+import pathlib
+
+if sys.version_info < (3, 7):
+    from collections import OrderedDict
+else:
+    OrderedDict = dict
+
+
+def _parents(path):
+    """
+    Given a path with elements separated by
+    posixpath.sep, generate all parents of that path.
+
+    >>> list(_parents('b/d'))
+    ['b']
+    >>> list(_parents('/b/d/'))
+    ['/b']
+    >>> list(_parents('b/d/f/'))
+    ['b/d', 'b']
+    >>> list(_parents('b'))
+    []
+    >>> list(_parents(''))
+    []
+    """
+    return itertools.islice(_ancestry(path), 1, None)
+
+
+def _ancestry(path):
+    """
+    Given a path with elements separated by
+    posixpath.sep, generate all elements of that path
+
+    >>> list(_ancestry('b/d'))
+    ['b/d', 'b']
+    >>> list(_ancestry('/b/d/'))
+    ['/b/d', '/b']
+    >>> list(_ancestry('b/d/f/'))
+    ['b/d/f', 'b/d', 'b']
+    >>> list(_ancestry('b'))
+    ['b']
+    >>> list(_ancestry(''))
+    []
+    """
+    path = path.rstrip(posixpath.sep)
+    while path and path != posixpath.sep:
+        yield path
+        path, tail = posixpath.split(path)
+
+
+_dedupe = OrderedDict.fromkeys
+"""Deduplicate an iterable in original order"""
+
+
+def _difference(minuend, subtrahend):
+    """
+    Return items in minuend not in subtrahend, retaining order
+    with O(1) lookup.
+    """
+    return itertools.filterfalse(set(subtrahend).__contains__, minuend)
+
+
+class CompleteDirs(zipfile.ZipFile):
+    """
+    A ZipFile subclass that ensures that implied directories
+    are always included in the namelist.
+    """
+
+    @staticmethod
+    def _implied_dirs(names):
+        parents = itertools.chain.from_iterable(map(_parents, names))
+        as_dirs = (p + posixpath.sep for p in parents)
+        return _dedupe(_difference(as_dirs, names))
+
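+    # For example (sketch), for stored names ['b/c.txt'] the implied
+    # directory 'b/' is not present in the archive but is reported:
+    #
+    #     assert list(CompleteDirs._implied_dirs(['b/c.txt'])) == ['b/']
+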
+    def namelist(self):
+        names = super(CompleteDirs, self).namelist()
+        return names + list(self._implied_dirs(names))
+
+    def _name_set(self):
+        return set(self.namelist())
+
+    def resolve_dir(self, name):
+        """
+        If the name represents a directory, return that name
+        as a directory (with the trailing slash).
+        """
+        names = self._name_set()
+        dirname = name + '/'
+        dir_match = name not in names and dirname in names
+        return dirname if dir_match else name
+
+    @classmethod
+    def make(cls, source):
+        """
+        Given a source (filename or zipfile), return an
+        appropriate CompleteDirs subclass.
+        """
+        if isinstance(source, CompleteDirs):
+            return source
+
+        if not isinstance(source, zipfile.ZipFile):
+            return cls(_pathlib_compat(source))
+
+        # Only allow for FastLookup when supplied zipfile is read-only
+        if 'r' not in source.mode:
+            cls = CompleteDirs
+
+        source.__class__ = cls
+        return source
+
+
+class FastLookup(CompleteDirs):
+    """
+    ZipFile subclass to ensure implicit
+    dirs exist and are resolved rapidly.
+    """
+
+    def namelist(self):
+        with contextlib.suppress(AttributeError):
+            return self.__names
+        self.__names = super(FastLookup, self).namelist()
+        return self.__names
+
+    def _name_set(self):
+        with contextlib.suppress(AttributeError):
+            return self.__lookup
+        self.__lookup = super(FastLookup, self)._name_set()
+        return self.__lookup
+
+
+def _pathlib_compat(path):
+    """
+    For path-like objects, convert to a filename for compatibility
+    on Python 3.6.1 and earlier.
+    """
+    try:
+        return path.__fspath__()
+    except AttributeError:
+        return str(path)
+
+
+class Path:
+    """
+    A pathlib-compatible interface for zip files.
+
+    Consider a zip file with this structure::
+
+        .
+        ├── a.txt
+        └── b
+            ├── c.txt
+            └── d
+                └── e.txt
+
+    >>> data = io.BytesIO()
+    >>> zf = zipfile.ZipFile(data, 'w')
+    >>> zf.writestr('a.txt', 'content of a')
+    >>> zf.writestr('b/c.txt', 'content of c')
+    >>> zf.writestr('b/d/e.txt', 'content of e')
+    >>> zf.filename = 'mem/abcde.zip'
+
+    Path accepts the zipfile object itself or a filename
+
+    >>> root = Path(zf)
+
+    From there, several path operations are available.
+
+    Directory iteration (including the zip file itself):
+
+    >>> a, b = root.iterdir()
+    >>> a
+    Path('mem/abcde.zip', 'a.txt')
+    >>> b
+    Path('mem/abcde.zip', 'b/')
+
+    name property:
+
+    >>> b.name
+    'b'
+
+    join with divide operator:
+
+    >>> c = b / 'c.txt'
+    >>> c
+    Path('mem/abcde.zip', 'b/c.txt')
+    >>> c.name
+    'c.txt'
+
+    Read text:
+
+    >>> c.read_text()
+    'content of c'
+
+    existence:
+
+    >>> c.exists()
+    True
+    >>> (b / 'missing.txt').exists()
+    False
+
+    Coercion to string:
+
+    >>> import os
+    >>> str(c).replace(os.sep, posixpath.sep)
+    'mem/abcde.zip/b/c.txt'
+
+    At the root, ``name``, ``filename``, and ``parent``
+    resolve to the zipfile. Note these attributes are not
+    valid and will raise a ``ValueError`` if the zipfile
+    has no filename.
+
+    >>> root.name
+    'abcde.zip'
+    >>> str(root.filename).replace(os.sep, posixpath.sep)
+    'mem/abcde.zip'
+    >>> str(root.parent)
+    'mem'
+    """
+
+    __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+    def __init__(self, root, at=""):
+        """
+        Construct a Path from a ZipFile or filename.
+
+        Note: When the source is an existing ZipFile object,
+        its type (__class__) will be mutated to a
+        specialized type. If the caller wishes to retain the
+        original type, the caller should either create a
+        separate ZipFile object or pass a filename.
+        """
+        self.root = FastLookup.make(root)
+        self.at = at
+
+    def open(self, mode='r', *args, pwd=None, **kwargs):
+        """
+        Open this entry as text or binary following the semantics
+        of ``pathlib.Path.open()`` by passing arguments through
+        to io.TextIOWrapper().
+        """
+        if self.is_dir():
+            raise IsADirectoryError(self)
+        zip_mode = mode[0]
+        if not self.exists() and zip_mode == 'r':
+            raise FileNotFoundError(self)
+        stream = self.root.open(self.at, zip_mode, pwd=pwd)
+        if 'b' in mode:
+            if args or kwargs:
+                raise ValueError("encoding args invalid for binary operation")
+            return stream
+        return io.TextIOWrapper(stream, *args, **kwargs)
+
+    @property
+    def name(self):
+        return pathlib.Path(self.at).name or self.filename.name
+
+    @property
+    def filename(self):
+        return pathlib.Path(self.root.filename).joinpath(self.at)
+
+    def read_text(self, *args, **kwargs):
+        with self.open('r', *args, **kwargs) as strm:
+            return strm.read()
+
+    def read_bytes(self):
+        with self.open('rb') as strm:
+            return strm.read()
+
+    def _is_child(self, path):
+        return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+    def _next(self, at):
+        return self.__class__(self.root, at)
+
+    def is_dir(self):
+        return not self.at or self.at.endswith("/")
+
+    def is_file(self):
+        return self.exists() and not self.is_dir()
+
+    def exists(self):
+        return self.at in self.root._name_set()
+
+    def iterdir(self):
+        if not self.is_dir():
+            raise ValueError("Can't listdir a file")
+        subs = map(self._next, self.root.namelist())
+        return filter(self._is_child, subs)
+
+    def __str__(self):
+        return posixpath.join(self.root.filename, self.at)
+
+    def __repr__(self):
+        return self.__repr.format(self=self)
+
+    def joinpath(self, *other):
+        next = posixpath.join(self.at, *map(_pathlib_compat, other))
+        return self._next(self.root.resolve_dir(next))
+
+    __truediv__ = joinpath
+
+    @property
+    def parent(self):
+        if not self.at:
+            return self.filename.parent
+        parent_at = posixpath.dirname(self.at.rstrip('/'))
+        if parent_at:
+            parent_at += '/'
+        return self._next(parent_at)
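+
+# Minimal on-disk usage sketch ('archive.zip' is illustrative):
+#
+#     root = Path('archive.zip')
+#     for entry in root.iterdir():
+#         if entry.is_file():
+#             print(entry.name, len(entry.read_bytes()))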
diff --git a/venv/pip-selfcheck.json b/venv/pip-selfcheck.json
new file mode 100644
index 00000000..6b10acd7
--- /dev/null
+++ b/venv/pip-selfcheck.json
@@ -0,0 +1 @@
+{"last_check":"2021-05-22T13:58:07Z","pypi_version":"21.1.1"}
\ No newline at end of file
-- 
GitLab